You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@streams.apache.org by sb...@apache.org on 2016/11/25 20:24:42 UTC

[01/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Repository: incubator-streams
Updated Branches:
  refs/heads/master a726a67ed -> 5dffd5c32


http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-verbs/src/main/java/org/apache/streams/verbs/ObjectCombinationGenericOrdering.java
----------------------------------------------------------------------
diff --git a/streams-verbs/src/main/java/org/apache/streams/verbs/ObjectCombinationGenericOrdering.java b/streams-verbs/src/main/java/org/apache/streams/verbs/ObjectCombinationGenericOrdering.java
index a9a5ced..9bff713 100644
--- a/streams-verbs/src/main/java/org/apache/streams/verbs/ObjectCombinationGenericOrdering.java
+++ b/streams-verbs/src/main/java/org/apache/streams/verbs/ObjectCombinationGenericOrdering.java
@@ -27,48 +27,67 @@ import com.google.common.collect.Ordering;
  */
 public class ObjectCombinationGenericOrdering extends Ordering<ObjectCombination> {
 
-    public ObjectCombinationGenericOrdering() {}
+  public ObjectCombinationGenericOrdering() {}
 
-    @Override
-    public int compare(ObjectCombination left, ObjectCombination right) {
-        if( wildcardCount(left) < wildcardCount(right))
-            return -1;
-        if( wildcardCount(left) > wildcardCount(right))
-            return 1;
-        if( !wildcard(left.getActor()) && wildcard(right.getActor()))
-            return -1;
-        if( wildcard(left.getActor()) && !wildcard(right.getActor()))
-            return 1;
-        if( !wildcard(left.getObject()) && wildcard(right.getObject()))
-            return -1;
-        if( wildcard(left.getObject()) && !wildcard(right.getObject()))
-            return 1;
-        if( !wildcard(left.getTarget()) && wildcard(right.getTarget()))
-            return -1;
-        if( wildcard(left.getTarget()) && !wildcard(right.getTarget()))
-            return 1;
-        if( !wildcard(left.getProvider()) && wildcard(right.getProvider()))
-            return -1;
-        if( wildcard(left.getProvider()) && !wildcard(right.getProvider()))
-            return 1;
-        return 0;
+  @Override
+  public int compare(ObjectCombination left, ObjectCombination right) {
+    if ( wildcardCount(left) < wildcardCount(right)) {
+      return -1;
+    } else if ( wildcardCount(left) > wildcardCount(right)) {
+      return 1;
+    } else if ( !wildcard(left.getActor()) && wildcard(right.getActor())) {
+      return -1;
+    } else if ( wildcard(left.getActor()) && !wildcard(right.getActor())) {
+      return 1;
+    } else if ( !wildcard(left.getObject()) && wildcard(right.getObject())) {
+      return -1;
+    } else if ( wildcard(left.getObject()) && !wildcard(right.getObject())) {
+      return 1;
+    } else if ( !wildcard(left.getTarget()) && wildcard(right.getTarget())) {
+      return -1;
+    } else if ( wildcard(left.getTarget()) && !wildcard(right.getTarget())) {
+      return 1;
+    } else if ( !wildcard(left.getProvider()) && wildcard(right.getProvider())) {
+      return -1;
+    } else if ( wildcard(left.getProvider()) && !wildcard(right.getProvider())) {
+      return 1;
+    } else {
+      return 0;
     }
+  }
 
-    public int wildcardCount(ObjectCombination objectCombination) {
-        int wildcardCount = 0;
-        if( wildcard(objectCombination.getActor()))
-            wildcardCount++;
-        if( wildcard(objectCombination.getObject()))
-            wildcardCount++;
-        if( wildcard(objectCombination.getTarget()))
-            wildcardCount++;
-        if( wildcard(objectCombination.getProvider()))
-            wildcardCount++;
-        return wildcardCount;
+  /**
+   * count wildcards in this ObjectCombination.
+   * @param objectCombination ObjectCombination
+   * @return count
+   */
+  public int wildcardCount(ObjectCombination objectCombination) {
+    int wildcardCount = 0;
+    if ( wildcard(objectCombination.getActor())) {
+      wildcardCount++;
     }
+    if ( wildcard(objectCombination.getObject())) {
+      wildcardCount++;
+    }
+    if ( wildcard(objectCombination.getTarget())) {
+      wildcardCount++;
+    }
+    if ( wildcard(objectCombination.getProvider())) {
+      wildcardCount++;
+    }
+    return wildcardCount;
+  }
 
-    public boolean wildcard(String pattern) {
-        if( pattern.equals("*")) return true;
-        else return false;
+  /**
+   * is pattern a wildcard.
+   * @param pattern String
+   * @return true or false
+   */
+  public boolean wildcard(String pattern) {
+    if ( pattern.equals("*")) {
+      return true;
+    } else {
+      return false;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-verbs/src/main/java/org/apache/streams/verbs/ObjectCombinationSpecificOrdering.java
----------------------------------------------------------------------
diff --git a/streams-verbs/src/main/java/org/apache/streams/verbs/ObjectCombinationSpecificOrdering.java b/streams-verbs/src/main/java/org/apache/streams/verbs/ObjectCombinationSpecificOrdering.java
index 90f9f56..18b6d8b 100644
--- a/streams-verbs/src/main/java/org/apache/streams/verbs/ObjectCombinationSpecificOrdering.java
+++ b/streams-verbs/src/main/java/org/apache/streams/verbs/ObjectCombinationSpecificOrdering.java
@@ -19,64 +19,85 @@
 
 package org.apache.streams.verbs;
 
-import com.google.common.collect.Ordering;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 
+import com.google.common.collect.Ordering;
+
 /**
  * Orders ObjectCombinations from most specific to most general, in context of
  * degree of match to a specified Activity.
  */
 public class ObjectCombinationSpecificOrdering extends Ordering<ObjectCombination> {
 
-    private Activity activity;
+  private Activity activity;
 
-    public ObjectCombinationSpecificOrdering(Activity activity) {
-        this.activity = activity;
-    }
+  public ObjectCombinationSpecificOrdering(Activity activity) {
+    this.activity = activity;
+  }
 
-    @Override
-    public int compare(ObjectCombination left, ObjectCombination right) {
-        if( matchCount(left) < matchCount(right))
-            return 1;
-        if( matchCount(left) > matchCount(right))
-            return -1;
-        if( !match(activity.getActor(), left.getActor()) && match(activity.getActor(), right.getActor()))
-            return 1;
-        if( match(activity.getActor(), left.getActor()) && !match(activity.getActor(), right.getActor()))
-            return -1;
-        if( !match(activity.getObject(), left.getObject()) && match(activity.getObject(), right.getObject()))
-            return 1;
-        if( match(activity.getObject(), left.getObject()) && !match(activity.getObject(), right.getObject()))
-            return -1;
-        if( !match(activity.getTarget(), left.getTarget()) && match(activity.getTarget(), right.getTarget()))
-            return 1;
-        if( match(activity.getTarget(), left.getTarget()) && !match(activity.getTarget(), right.getTarget()))
-            return -1;
-        if( !match(activity.getProvider(), left.getProvider()) && match(activity.getTarget(), right.getProvider()))
-            return 1;
-        if( match(activity.getProvider(), left.getProvider()) && !match(activity.getTarget(), right.getProvider()))
-            return -1;
-        return 0;
+  @Override
+  public int compare(ObjectCombination left, ObjectCombination right) {
+    if (matchCount(left) < matchCount(right)) {
+      return 1;
+    } else if ( matchCount(left) > matchCount(right)) {
+      return -1;
+    } else if ( !match(activity.getActor(), left.getActor()) && match(activity.getActor(), right.getActor())) {
+      return 1;
+    } else if ( match(activity.getActor(), left.getActor()) && !match(activity.getActor(), right.getActor())) {
+      return -1;
+    } else if ( !match(activity.getObject(), left.getObject()) && match(activity.getObject(), right.getObject())) {
+      return 1;
+    } else if ( match(activity.getObject(), left.getObject()) && !match(activity.getObject(), right.getObject())) {
+      return -1;
+    } else if ( !match(activity.getTarget(), left.getTarget()) && match(activity.getTarget(), right.getTarget())) {
+      return 1;
+    } else if ( match(activity.getTarget(), left.getTarget()) && !match(activity.getTarget(), right.getTarget())) {
+      return -1;
+    } else if ( !match(activity.getProvider(), left.getProvider()) && match(activity.getProvider(), right.getProvider())) {
+      return 1;
+    } else if ( match(activity.getProvider(), left.getProvider()) && !match(activity.getProvider(), right.getProvider())) {
+      return -1;
+    } else {
+      return 0;
     }
+  }
 
-    public int matchCount(ObjectCombination objectCombination) {
-        int matchCount = 0;
-        if( match(activity.getActor(), objectCombination.getActor()))
-            matchCount++;
-        if( match(activity.getObject(), objectCombination.getObject()))
-            matchCount++;
-        if( match(activity.getTarget(), objectCombination.getTarget()))
-            matchCount++;
-        if( match(activity.getProvider(), objectCombination.getProvider()))
-            matchCount++;
-        return matchCount;
+  /**
+   * count matches between this ObjectCombination and this Activity.
+   * @param objectCombination ObjectCombination
+   * @return count
+   */
+  public int matchCount(ObjectCombination objectCombination) {
+    int matchCount = 0;
+    if ( match(activity.getActor(), objectCombination.getActor())) {
+      matchCount++;
+    }
+    if ( match(activity.getObject(), objectCombination.getObject())) {
+      matchCount++;
+    }
+    if ( match(activity.getTarget(), objectCombination.getTarget())) {
+      matchCount++;
+    }
+    if ( match(activity.getProvider(), objectCombination.getProvider())) {
+      matchCount++;
     }
+    return matchCount;
+  }
 
-    public boolean match(ActivityObject activityObject, String pattern) {
-        if( activityObject != null &&
-            activityObject.getObjectType() != null &&
-            activityObject.getObjectType().equals(pattern)) return true;
-        else return false;
+  /**
+   * whether this ActivityObject matches the corresponding ObjectCombination pattern.
+   * @param activityObject ActivityObject
+   * @param pattern pattern
+   * @return true or false
+   */
+  public boolean match(ActivityObject activityObject, String pattern) {
+    if ( activityObject != null
+         && activityObject.getObjectType() != null
+         && activityObject.getObjectType().equals(pattern)) {
+      return true;
+    } else {
+      return false;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionMatchUtil.java
----------------------------------------------------------------------
diff --git a/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionMatchUtil.java b/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionMatchUtil.java
index bc177b2..b447794 100644
--- a/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionMatchUtil.java
+++ b/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionMatchUtil.java
@@ -24,30 +24,44 @@ import org.apache.streams.pojo.json.Activity;
 import java.util.Set;
 
 /**
- * Check whether an activity matches one or several VerbDefinition
+ * Check whether an activity matches one or several VerbDefinition.
  */
 public class VerbDefinitionMatchUtil {
 
-    public static boolean match(Activity activity, Set<VerbDefinition> verbDefinitionSet) {
-
-        for( VerbDefinition verbDefinition : verbDefinitionSet) {
-            if( match( activity, verbDefinition )) {
-                return true;
-            }
-        }
-        return false;
+  /**
+   * whether this Activity matches any of a Set of VerbDefinitions.
+   * @param activity Activity
+   * @param verbDefinitionSet Set of VerbDefinition
+   * @return true or false
+   */
+  public static boolean match(Activity activity, Set<VerbDefinition> verbDefinitionSet) {
 
+    for ( VerbDefinition verbDefinition : verbDefinitionSet) {
+      if ( match( activity, verbDefinition )) {
+        return true;
+      }
     }
+    return false;
+
+  }
 
-    public static boolean match(Activity activity, VerbDefinition verbDefinition) {
+  /**
+   * whether this Activity matches this VerbDefinition.
+   * @param activity Activity
+   * @param verbDefinition VerbDefinition
+   * @return true or false
+   */
+  public static boolean match(Activity activity, VerbDefinition verbDefinition) {
 
-        if( verbDefinition.getValue() != null &&
-            verbDefinition.getValue().equals(activity.getVerb())) {
-            for (ObjectCombination objectCombination : verbDefinition.getObjects())
-                if (VerbDefinitionResolver.filter(activity, objectCombination) == true)
-                    return true;
+    if ( verbDefinition.getValue() != null
+          && verbDefinition.getValue().equals(activity.getVerb())) {
+      for (ObjectCombination objectCombination : verbDefinition.getObjects()) {
+        if (VerbDefinitionResolver.filter(activity, objectCombination) == true) {
+          return true;
         }
-        return false;
+      }
     }
+    return false;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionResolver.java
----------------------------------------------------------------------
diff --git a/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionResolver.java b/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionResolver.java
index 595fb2e..81f0bfa 100644
--- a/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionResolver.java
+++ b/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionResolver.java
@@ -18,98 +18,122 @@
 
 package org.apache.streams.verbs;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import org.apache.commons.lang.SerializationUtils;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.util.SerializationUtil;
+
+import com.google.common.collect.Lists;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
 public class VerbDefinitionResolver {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(VerbDefinitionResolver.class);
-
-    protected Set<VerbDefinition> verbDefinitionSet;
-
-    public VerbDefinitionResolver() {
-        // get with reflection
-    }
-
-    public VerbDefinitionResolver(Set<VerbDefinition> verbDefinitionSet) {
-        this();
-        this.verbDefinitionSet = verbDefinitionSet;
-    }
-
-    public List<VerbDefinition> matchingVerbDefinitions(Activity activity) {
-
-        // ConcurrentHashSet is preferable, but it's only in guava 15+
-        // spark 1.5.0 uses guava 14 so for the moment this is the workaround
-        // Set<VerbDefinition> matches = Sets.newConcurrentHashSet();
-        Set<VerbDefinition> matches = Collections.newSetFromMap(new ConcurrentHashMap<VerbDefinition, Boolean>());
-
-        for( VerbDefinition verbDefinition : verbDefinitionSet ) {
-            VerbDefinition verbDefinitionCopy = SerializationUtil.cloneBySerialization(verbDefinition);
-            if( activity.getVerb().equals(verbDefinition.getValue())) {
-                for( ObjectCombination criteria : verbDefinitionCopy.getObjects()) {
-                    if( filter(activity, criteria) == false ) {
-                        verbDefinitionCopy.getObjects().remove(criteria);
-                    }
-                }
-                if( verbDefinitionCopy.getObjects().size() > 0)
-                    matches.add(verbDefinitionCopy);
-            }
+  private static final Logger LOGGER = LoggerFactory.getLogger(VerbDefinitionResolver.class);
+
+  protected Set<VerbDefinition> verbDefinitionSet;
+
+  public VerbDefinitionResolver() {
+    // get with reflection
+  }
+
+  public VerbDefinitionResolver(Set<VerbDefinition> verbDefinitionSet) {
+    this();
+    this.verbDefinitionSet = verbDefinitionSet;
+  }
+
+  /**
+   * return all matching VerbDefinitions for an Activity.
+   * @param activity Activity
+   * @return List of VerbDefinition
+   */
+  public List<VerbDefinition> matchingVerbDefinitions(Activity activity) {
+
+    // ConcurrentHashSet is preferable, but it's only in guava 15+
+    // spark 1.5.0 uses guava 14 so for the moment this is the workaround
+    // Set<VerbDefinition> matches = Sets.newConcurrentHashSet();
+    Set<VerbDefinition> matches = Collections.newSetFromMap(new ConcurrentHashMap<VerbDefinition, Boolean>());
+
+    for ( VerbDefinition verbDefinition : verbDefinitionSet ) {
+      VerbDefinition verbDefinitionCopy = SerializationUtil.cloneBySerialization(verbDefinition);
+      if ( activity.getVerb().equals(verbDefinition.getValue())) {
+        for ( ObjectCombination criteria : verbDefinitionCopy.getObjects()) {
+          if ( filter(activity, criteria) == false ) {
+            verbDefinitionCopy.getObjects().remove(criteria);
+          }
         }
-
-        return Lists.newArrayList(matches);
-
-    }
-
-    public List<ObjectCombination> matchingObjectCombinations(Activity activity) {
-
-        List<ObjectCombination> results = Lists.newArrayList();
-
-        for( VerbDefinition verbDefinition : verbDefinitionSet ) {
-            if( activity.getVerb().equals(verbDefinition.getValue())) {
-                for( ObjectCombination criteria : verbDefinition.getObjects()) {
-                    if( filter(activity, criteria) == true ) {
-                        results.add(criteria);
-                    }
-                }
-            }
+        if ( verbDefinitionCopy.getObjects().size() > 0) {
+          matches.add(verbDefinitionCopy);
         }
+      }
+    }
 
-        Collections.sort(results, new ObjectCombinationSpecificOrdering(activity));
+    return Lists.newArrayList(matches);
 
-        return results;
-    }
+  }
 
-    public static boolean filter(Activity activity, ObjectCombination criteria) {
+  /**
+   * return all matching ObjectCombinations for an Activity.
+   * @param activity Activity
+   * @return List of ObjectCombination
+   */
+  public List<ObjectCombination> matchingObjectCombinations(Activity activity) {
 
-        return  filterType(activity.getActor(), criteria.getActorRequired(), criteria.getActor())
-                &&
-                filterType(activity.getObject(), criteria.getObjectRequired(), criteria.getObject())
-                &&
-                filterType(activity.getProvider(), criteria.getProviderRequired(), criteria.getProvider())
-                &&
-                filterType(activity.getTarget(), criteria.getTargetRequired(), criteria.getTarget())
-                ;
+    List<ObjectCombination> results = Lists.newArrayList();
 
+    for ( VerbDefinition verbDefinition : verbDefinitionSet ) {
+      if ( activity.getVerb().equals(verbDefinition.getValue())) {
+        for ( ObjectCombination criteria : verbDefinition.getObjects()) {
+          if ( filter(activity, criteria) == true ) {
+            results.add(criteria);
+          }
+        }
+      }
     }
 
-    public static boolean filterType(ActivityObject activityObject, boolean required, String pattern) {
-        if (required == true && activityObject == null) return false;
-        if (required == false && activityObject == null) return true;
-        if (pattern.equals("*")) return true;
-        else if (activityObject.getObjectType() == null) return false;
-        else if (activityObject.getObjectType().equals(pattern))
-            return true;
-        else return false;
+    Collections.sort(results, new ObjectCombinationSpecificOrdering(activity));
+
+    return results;
+  }
+
+  /**
+   * whether this Activity matches this ObjectCombination.
+   * @param activity Activity
+   * @param criteria ObjectCombination
+   * @return true or false
+   */
+  public static boolean filter(Activity activity, ObjectCombination criteria) {
+
+    return  filterType(activity.getActor(), criteria.getActorRequired(), criteria.getActor())
+        &&
+        filterType(activity.getObject(), criteria.getObjectRequired(), criteria.getObject())
+        &&
+        filterType(activity.getProvider(), criteria.getProviderRequired(), criteria.getProvider())
+        &&
+        filterType(activity.getTarget(), criteria.getTargetRequired(), criteria.getTarget())
+        ;
+
+  }
+
+  public static boolean filterType(ActivityObject activityObject, boolean required, String pattern) {
+    if (required == true && activityObject == null) {
+      return false;
+    } else if (required == false && activityObject == null) {
+      return true;
+    } else if (pattern.equals("*")) {
+      return true;
+    } else if (activityObject.getObjectType() == null) {
+      return false;
+    } else if (activityObject.getObjectType().equals(pattern)) {
+      return true;
+    } else {
+      return false;
     }
+  }
 
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionTemplateUtil.java
----------------------------------------------------------------------
diff --git a/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionTemplateUtil.java b/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionTemplateUtil.java
index 3509b32..53113a7 100644
--- a/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionTemplateUtil.java
+++ b/streams-verbs/src/main/java/org/apache/streams/verbs/VerbDefinitionTemplateUtil.java
@@ -19,45 +19,66 @@
 
 package org.apache.streams.verbs;
 
-import com.google.common.base.Strings;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
+
+import com.google.common.base.Strings;
 import org.stringtemplate.v4.ST;
 
 /**
- * Transforms VerbDefinition templates into readable strings
+ * Transforms VerbDefinition templates into readable strings.
  */
 public class VerbDefinitionTemplateUtil {
 
-    public static String asString(Activity activity, ObjectCombination objectCombination) {
+  /**
+   * Transform Activity into readable string using ObjectCombination title.
+   * @param activity Activity
+   * @param objectCombination ObjectCombination
+   * @return String
+   */
+  public static String asString(Activity activity, ObjectCombination objectCombination) {
 
-        return asString("*", activity, objectCombination);
+    return asString("*", activity, objectCombination);
 
-    }
+  }
 
-    public static String asString(String language, Activity activity, ObjectCombination objectCombination) {
+  /**
+   * Transform Activity into readable string using ObjectCombination title and specified language.
+   * @param language language
+   * @param activity Activity
+   * @param objectCombination ObjectCombination
+   * @return String
+   */
+  public static String asString(String language, Activity activity, ObjectCombination objectCombination) {
 
-        String template = (String) objectCombination.getTemplates().getAdditionalProperties().get(language);
-        template = template.replace('{', '<');
-        template = template.replace('}', '>');
-        ST st = new ST(template);
-        st.add("actor", displayName(activity.getActor()));
-        st.add("provider", displayName(activity.getProvider()));
-        st.add("object", displayName(activity.getObject()));
-        st.add("target", displayName(activity.getTarget()));
+    String template = (String) objectCombination.getTemplates().getAdditionalProperties().get(language);
+    template = template.replace('{', '<');
+    template = template.replace('}', '>');
+    ST st = new ST(template);
+    st.add("actor", displayName(activity.getActor()));
+    st.add("provider", displayName(activity.getProvider()));
+    st.add("object", displayName(activity.getObject()));
+    st.add("target", displayName(activity.getTarget()));
 
-        return st.render();
-    }
+    return st.render();
+  }
 
-    public static String displayName(ActivityObject activityObject) {
-        if( activityObject == null )
-            return "";
-        if( !Strings.isNullOrEmpty(activityObject.getDisplayName()))
-            return activityObject.getDisplayName();
-        if( !Strings.isNullOrEmpty(activityObject.getObjectType()))
-            return activityObject.getObjectType();
-        if( !Strings.isNullOrEmpty(activityObject.toString()))
-            return activityObject.toString();
-        else return "";
+  /**
+   * Readable display Name for ActivityObject.
+   * @param activityObject ActivityObject
+   * @return displayName
+   */
+  public static String displayName(ActivityObject activityObject) {
+    if ( activityObject == null ) {
+      return "";
+    } else if ( !Strings.isNullOrEmpty(activityObject.getDisplayName())) {
+      return activityObject.getDisplayName();
+    } else if ( !Strings.isNullOrEmpty(activityObject.getObjectType())) {
+      return activityObject.getObjectType();
+    } else if ( !Strings.isNullOrEmpty(activityObject.toString())) {
+      return activityObject.toString();
+    } else {
+      return "";
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-verbs/src/test/java/org/apache/streams/verbs/ObjectCombinationGenericOrderingTest.java
----------------------------------------------------------------------
diff --git a/streams-verbs/src/test/java/org/apache/streams/verbs/ObjectCombinationGenericOrderingTest.java b/streams-verbs/src/test/java/org/apache/streams/verbs/ObjectCombinationGenericOrderingTest.java
index 88f830b..16e6167 100644
--- a/streams-verbs/src/test/java/org/apache/streams/verbs/ObjectCombinationGenericOrderingTest.java
+++ b/streams-verbs/src/test/java/org/apache/streams/verbs/ObjectCombinationGenericOrderingTest.java
@@ -27,15 +27,15 @@ import org.junit.Test;
  */
 public class ObjectCombinationGenericOrderingTest  {
 
-    @Test
-    public void compareWildcardCountTest() {
-        ObjectCombination combination1 = new ObjectCombination();
-        ObjectCombination combination2 = new ObjectCombination().withActor("actor");
-        assert (new ObjectCombinationGenericOrdering()).compare(combination1, combination2) > 0;
-        ObjectCombination combination3 = new ObjectCombination();
-        ObjectCombination combination4 = new ObjectCombination().withProvider("provider");
-        assert (new ObjectCombinationGenericOrdering()).compare(combination3, combination4) > 0;
-    }
+  @Test
+  public void compareWildcardCountTest() {
+    ObjectCombination combination1 = new ObjectCombination();
+    ObjectCombination combination2 = new ObjectCombination().withActor("actor");
+    assert (new ObjectCombinationGenericOrdering()).compare(combination1, combination2) > 0;
+    ObjectCombination combination3 = new ObjectCombination();
+    ObjectCombination combination4 = new ObjectCombination().withProvider("provider");
+    assert (new ObjectCombinationGenericOrdering()).compare(combination3, combination4) > 0;
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-verbs/src/test/java/org/apache/streams/verbs/ObjectCombinationSpecificOrderingTest.java
----------------------------------------------------------------------
diff --git a/streams-verbs/src/test/java/org/apache/streams/verbs/ObjectCombinationSpecificOrderingTest.java b/streams-verbs/src/test/java/org/apache/streams/verbs/ObjectCombinationSpecificOrderingTest.java
index 3162aa0..8162734 100644
--- a/streams-verbs/src/test/java/org/apache/streams/verbs/ObjectCombinationSpecificOrderingTest.java
+++ b/streams-verbs/src/test/java/org/apache/streams/verbs/ObjectCombinationSpecificOrderingTest.java
@@ -22,6 +22,7 @@ package org.apache.streams.verbs;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Provider;
+
 import org.junit.Test;
 
 /**
@@ -30,21 +31,21 @@ import org.junit.Test;
  */
 public class ObjectCombinationSpecificOrderingTest {
 
-    @Test
-    public void compareMatchCountTest() {
-        ActivityObject actor = new ActivityObject();
-        actor.setObjectType("actor");
-        Activity activity = new Activity().withActor(actor);
-        ObjectCombination combination1 = new ObjectCombination();
-        ObjectCombination combination2 = new ObjectCombination().withActor("actor");
-        assert (new ObjectCombinationSpecificOrdering(activity)).compare(combination1, combination2) > 0;
-        Provider provider = new Provider();
-        provider.setObjectType("application");
-        Activity activity2 = new Activity().withProvider(provider);
-        ObjectCombination combination3 = new ObjectCombination();
-        ObjectCombination combination4 = new ObjectCombination().withProvider("application");
-        assert (new ObjectCombinationSpecificOrdering(activity2)).compare(combination3, combination4) > 0;
-    }
+  @Test
+  public void compareMatchCountTest() {
+    ActivityObject actor = new ActivityObject();
+    actor.setObjectType("actor");
+    Activity activity = new Activity().withActor(actor);
+    ObjectCombination combination1 = new ObjectCombination();
+    ObjectCombination combination2 = new ObjectCombination().withActor("actor");
+    assert (new ObjectCombinationSpecificOrdering(activity)).compare(combination1, combination2) > 0;
+    Provider provider = new Provider();
+    provider.setObjectType("application");
+    Activity activity2 = new Activity().withProvider(provider);
+    ObjectCombination combination3 = new ObjectCombination();
+    ObjectCombination combination4 = new ObjectCombination().withProvider("application");
+    assert (new ObjectCombinationSpecificOrdering(activity2)).compare(combination3, combination4) > 0;
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionResolverTest.java
----------------------------------------------------------------------
diff --git a/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionResolverTest.java b/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionResolverTest.java
index e3445c5..b4d2b2e 100644
--- a/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionResolverTest.java
+++ b/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionResolverTest.java
@@ -19,91 +19,92 @@
 
 package org.apache.streams.verbs;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Sets;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Sets;
 import org.junit.Test;
 
 import java.util.List;
 
 /**
- * Tests for {$link: org.apache.streams.verbs.VerbDefinitionResolver}
+ * Tests for {$link: org.apache.streams.verbs.VerbDefinitionResolver}.
  */
 public class VerbDefinitionResolverTest {
 
-    ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    /**
-     * Test of matchingVerbDefinitions
-     */
-    @Test
-    public void testMatchingVerbDefinitions() throws Exception {
-        VerbDefinition definition = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/post.json"), VerbDefinition.class);
-        VerbDefinitionResolver resolver = new VerbDefinitionResolver(Sets.newHashSet(definition));
-        Activity activity0 = mapper.readValue("{\"id\":\"1\",\"verb\":\"notpost\"}\n", Activity.class);
-        List<VerbDefinition> result0 = resolver.matchingVerbDefinitions(activity0);
-        assert result0.size() == 0;
-        Activity activity1 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\"}\n", Activity.class);
-        List<VerbDefinition> result1 = resolver.matchingVerbDefinitions(activity1);
-        assert result1.size() == 1;
-        assert definition.equals(result1.get(0));
-        Activity activity2 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"product\"}}\n", Activity.class);
-        List<VerbDefinition> result2 = resolver.matchingVerbDefinitions(activity2);
-        assert result2.size() == 1;
-        assert definition.equals(result2.get(0));
-        Activity activity3 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"application\"}}\n\n", Activity.class);
-        List<VerbDefinition> result3 = resolver.matchingVerbDefinitions(activity3);
-        assert result3.size() == 1;
-        assert definition.equals(result3.get(0));
-        Activity activity4 = mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"object\":{\"id\":\"objectId\",\"objectType\":\"task\"}}\n", Activity.class);
-        List<VerbDefinition> result4 = resolver.matchingVerbDefinitions(activity4);
-        assert result4.size() == 1;
-        assert definition.equals(result4.get(0));
-        Activity activity5 = mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"target\":{\"id\":\"targetId\",\"objectType\":\"group\"}}\n", Activity.class);
-        List<VerbDefinition> result5 = resolver.matchingVerbDefinitions(activity5);
-        assert result5.size() == 1;
-        assert definition.equals(result5.get(0));
-        Activity activity6 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"actor\":{\"id\":\"actorId\",\"objectType\":\"page\"}}\n", Activity.class);
-        List<VerbDefinition> result6 = resolver.matchingVerbDefinitions(activity6);
-        assert result6.size() == 1;
-        assert definition.equals(result6.get(0));
-    }
+  /**
+   * Test of matchingVerbDefinitions.
+   */
+  @Test
+  public void testMatchingVerbDefinitions() throws Exception {
+    VerbDefinition definition = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/post.json"), VerbDefinition.class);
+    VerbDefinitionResolver resolver = new VerbDefinitionResolver(Sets.newHashSet(definition));
+    Activity activity0 = mapper.readValue("{\"id\":\"1\",\"verb\":\"notpost\"}\n", Activity.class);
+    List<VerbDefinition> result0 = resolver.matchingVerbDefinitions(activity0);
+    assert result0.size() == 0;
+    Activity activity1 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\"}\n", Activity.class);
+    List<VerbDefinition> result1 = resolver.matchingVerbDefinitions(activity1);
+    assert result1.size() == 1;
+    assert definition.equals(result1.get(0));
+    Activity activity2 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"product\"}}\n", Activity.class);
+    List<VerbDefinition> result2 = resolver.matchingVerbDefinitions(activity2);
+    assert result2.size() == 1;
+    assert definition.equals(result2.get(0));
+    Activity activity3 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"application\"}}\n\n", Activity.class);
+    List<VerbDefinition> result3 = resolver.matchingVerbDefinitions(activity3);
+    assert result3.size() == 1;
+    assert definition.equals(result3.get(0));
+    Activity activity4 = mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"object\":{\"id\":\"objectId\",\"objectType\":\"task\"}}\n", Activity.class);
+    List<VerbDefinition> result4 = resolver.matchingVerbDefinitions(activity4);
+    assert result4.size() == 1;
+    assert definition.equals(result4.get(0));
+    Activity activity5 = mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"target\":{\"id\":\"targetId\",\"objectType\":\"group\"}}\n", Activity.class);
+    List<VerbDefinition> result5 = resolver.matchingVerbDefinitions(activity5);
+    assert result5.size() == 1;
+    assert definition.equals(result5.get(0));
+    Activity activity6 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"actor\":{\"id\":\"actorId\",\"objectType\":\"page\"}}\n", Activity.class);
+    List<VerbDefinition> result6 = resolver.matchingVerbDefinitions(activity6);
+    assert result6.size() == 1;
+    assert definition.equals(result6.get(0));
+  }
 
-    /**
-     * Test of matchingObjectCombinations
-     */
-    @Test
-    public void testMatchingObjectCombinations() throws Exception {
-        VerbDefinition provider = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/provider.json"), VerbDefinition.class);
-        VerbDefinition actor = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/actor.json"), VerbDefinition.class);
-        VerbDefinition object = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/object.json"), VerbDefinition.class);
-        VerbDefinition post = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/post.json"), VerbDefinition.class);
-        VerbDefinition follow = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/follow.json"), VerbDefinition.class);
-        VerbDefinitionResolver resolver = new VerbDefinitionResolver(Sets.newHashSet(provider, actor, object, post, follow));
-        Activity activity0 = mapper.readValue("{\"id\":\"1\",\"verb\":\"notpost\"}\n", Activity.class);
-        List<ObjectCombination> result0 = resolver.matchingObjectCombinations(activity0);
-        assert result0.size() == 0;
-        Activity activity1 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\"}\n", Activity.class);
-        List<ObjectCombination> result1 = resolver.matchingObjectCombinations(activity1);
-        assert result1.size() == 4;
-        Activity activity2 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"product\"}}\n", Activity.class);
-        List<ObjectCombination> result2 = resolver.matchingObjectCombinations(activity2);
-        assert result2.size() == 3;
-        Activity activity3 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"application\"}}\n", Activity.class);
-        List<ObjectCombination> result3 = resolver.matchingObjectCombinations(activity3);
-        assert result3.size() == 4;
-        assert provider.getObjects().get(0).equals(result3.get(0));
-        Activity activity4 = mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"object\":{\"id\":\"objectId\",\"objectType\":\"task\"}}\n", Activity.class);
-        List<ObjectCombination> result4 = resolver.matchingObjectCombinations(activity4);
-        assert result4.size() == 4;
-        assert object.getObjects().get(0).equals(result4.get(0));
-        Activity activity5 = mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"target\":{\"id\":\"targetId\",\"objectType\":\"group\"}}\n", Activity.class);
-        List<ObjectCombination> result5 = resolver.matchingObjectCombinations(activity5);
-        assert result5.size() == 4;
-        Activity activity6 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"actor\":{\"id\":\"actorId\",\"objectType\":\"person\"}}\n", Activity.class);
-        List<ObjectCombination> result6 = resolver.matchingObjectCombinations(activity6);
-        assert result6.size() == 4;
-        assert actor.getObjects().get(0).equals(result6.get(0));
-    }
+  /**
+   * Test of matchingObjectCombinations.
+   */
+  @Test
+  public void testMatchingObjectCombinations() throws Exception {
+    VerbDefinition provider = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/provider.json"), VerbDefinition.class);
+    VerbDefinition actor = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/actor.json"), VerbDefinition.class);
+    VerbDefinition object = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/object.json"), VerbDefinition.class);
+    VerbDefinition post = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/post.json"), VerbDefinition.class);
+    VerbDefinition follow = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/follow.json"), VerbDefinition.class);
+    VerbDefinitionResolver resolver = new VerbDefinitionResolver(Sets.newHashSet(provider, actor, object, post, follow));
+    Activity activity0 = mapper.readValue("{\"id\":\"1\",\"verb\":\"notpost\"}\n", Activity.class);
+    List<ObjectCombination> result0 = resolver.matchingObjectCombinations(activity0);
+    assert result0.size() == 0;
+    Activity activity1 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\"}\n", Activity.class);
+    List<ObjectCombination> result1 = resolver.matchingObjectCombinations(activity1);
+    assert result1.size() == 4;
+    Activity activity2 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"product\"}}\n", Activity.class);
+    List<ObjectCombination> result2 = resolver.matchingObjectCombinations(activity2);
+    assert result2.size() == 3;
+    Activity activity3 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"provider\":{\"id\":\"providerId\",\"objectType\":\"application\"}}\n", Activity.class);
+    List<ObjectCombination> result3 = resolver.matchingObjectCombinations(activity3);
+    assert result3.size() == 4;
+    assert provider.getObjects().get(0).equals(result3.get(0));
+    Activity activity4 = mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"object\":{\"id\":\"objectId\",\"objectType\":\"task\"}}\n", Activity.class);
+    List<ObjectCombination> result4 = resolver.matchingObjectCombinations(activity4);
+    assert result4.size() == 4;
+    assert object.getObjects().get(0).equals(result4.get(0));
+    Activity activity5 = mapper.readValue("{\"id\":\"id\",\"verb\":\"post\",\"target\":{\"id\":\"targetId\",\"objectType\":\"group\"}}\n", Activity.class);
+    List<ObjectCombination> result5 = resolver.matchingObjectCombinations(activity5);
+    assert result5.size() == 4;
+    Activity activity6 = mapper.readValue("{\"id\":\"1\",\"verb\":\"post\",\"actor\":{\"id\":\"actorId\",\"objectType\":\"person\"}}\n", Activity.class);
+    List<ObjectCombination> result6 = resolver.matchingObjectCombinations(activity6);
+    assert result6.size() == 4;
+    assert actor.getObjects().get(0).equals(result6.get(0));
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionTemplateTest.java
----------------------------------------------------------------------
diff --git a/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionTemplateTest.java b/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionTemplateTest.java
index eeb61f7..ba91b06 100644
--- a/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionTemplateTest.java
+++ b/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionTemplateTest.java
@@ -19,67 +19,66 @@
 
 package org.apache.streams.verbs;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Provider;
-import org.junit.Test;
 
-import java.lang.annotation.Target;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.Test;
 
 /**
- * Tests for {$link: org.apache.streams.verbs.VerbDefinitionTemplateUtil}
+ * Tests for {$link: org.apache.streams.verbs.VerbDefinitionTemplateUtil}.
  */
 public class VerbDefinitionTemplateTest {
 
-    ObjectMapper mapper = new ObjectMapper();
+  ObjectMapper mapper = new ObjectMapper();
 
-    /**
-     * Test application of template with no field
-     */
-    @Test
-    public void testNoField() throws Exception {
-        Activity activity = new Activity().withVerb("nofields");
-        VerbDefinition definition = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/nofields.json"), VerbDefinition.class);
-        assert VerbDefinitionTemplateUtil.asString(activity, definition.getObjects().get(0)).contains("something");
-    }
+  /**
+   * Test application of template with no field.
+   */
+  @Test
+  public void testNoField() throws Exception {
+    Activity activity = new Activity().withVerb("nofields");
+    VerbDefinition definition = mapper.readValue(VerbDefinitionResolverTest.class.getResourceAsStream("/nofields.json"), VerbDefinition.class);
+    assert VerbDefinitionTemplateUtil.asString(activity, definition.getObjects().get(0)).contains("something");
+  }
 
-    /**
-     * Test application of template with top-level fields
-     */
-    @Test
-    public void testTopField() throws Exception {
-        ActivityObject actor = new ActivityObject();
-        actor.setObjectType("page");
-        actor.setDisplayName("Paige");
-        Provider provider = new Provider();
-        provider.setObjectType("application");
-        provider.setDisplayName("Ahp");
-        ActivityObject object = new ActivityObject();
-        object.setObjectType("task");
-        object.setDisplayName("Tsk");
-        ActivityObject target = new ActivityObject();
-        target.setObjectType("person");
-        target.setDisplayName("Homie");
-        Activity activity = new Activity().withVerb("post");
-        activity.setActor(actor);
-        activity.setProvider(provider);
-        activity.setObject(object);
-        activity.setTarget(target);
-        VerbDefinition definition = mapper.readValue(VerbDefinitionTest.class.getResourceAsStream("/post.json"), VerbDefinition.class);
-        String message = VerbDefinitionTemplateUtil.asString(activity, definition.getObjects().get(0));
-        assert message.contains("Paige");
-        assert message.contains("Ahp");
-        assert message.contains("Tsk");
-        assert message.contains("Homie");
-    }
+  /**
+   * Test application of template with top-level fields.
+   */
+  @Test
+  public void testTopField() throws Exception {
+    ActivityObject actor = new ActivityObject();
+    actor.setObjectType("page");
+    actor.setDisplayName("Paige");
+    Provider provider = new Provider();
+    provider.setObjectType("application");
+    provider.setDisplayName("Ahp");
+    ActivityObject object = new ActivityObject();
+    object.setObjectType("task");
+    object.setDisplayName("Tsk");
+    ActivityObject target = new ActivityObject();
+    target.setObjectType("person");
+    target.setDisplayName("Homie");
+    Activity activity = new Activity().withVerb("post");
+    activity.setActor(actor);
+    activity.setProvider(provider);
+    activity.setObject(object);
+    activity.setTarget(target);
+    VerbDefinition definition = mapper.readValue(VerbDefinitionTest.class.getResourceAsStream("/post.json"), VerbDefinition.class);
+    String message = VerbDefinitionTemplateUtil.asString(activity, definition.getObjects().get(0));
+    assert message.contains("Paige");
+    assert message.contains("Ahp");
+    assert message.contains("Tsk");
+    assert message.contains("Homie");
+  }
 
-    /**
-     * Test application of template with second-level fields
-     */
-    @Test
-    public void testSecondFields() {
+  /**
+   * Test application of template with second-level fields.
+   */
+  @Test
+  public void testSecondFields() {
 
-    }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionTest.java
----------------------------------------------------------------------
diff --git a/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionTest.java b/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionTest.java
index 6be59b1..c481f5c 100644
--- a/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionTest.java
+++ b/streams-verbs/src/test/java/org/apache/streams/verbs/VerbDefinitionTest.java
@@ -22,42 +22,45 @@ package org.apache.streams.verbs;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.junit.Test;
 
+/**
+ * Unit tests for VerbDefinition and utils.
+ */
 public class VerbDefinitionTest {
 
-    ObjectMapper mapper = new ObjectMapper();
-
-    /**
-     * Test read verb definition from json
-     */
-    @Test
-    public void testReadVerbDefinitionJson() throws Exception {
-
-        VerbDefinition definition = mapper.readValue(VerbDefinitionTest.class.getResourceAsStream("/do.json"), VerbDefinition.class);
-
-        assert definition != null;
-        assert definition.getObjectType().equals("verb");
-        assert definition.getObjects().size() == 1;
-        assert definition.getObjects().get(0).getActor().equals("*");
-        assert definition.getObjects().get(0).getObject().equals("*");
-        assert definition.getObjects().get(0).getTarget().equals("*");
-        assert definition.getObjects().get(0).getProvider().equals("*");
-        assert definition.getObjects().get(0).getTemplates().getAdditionalProperties().size() == 1;
-    }
-
-    /**
-     * Test verb definition defaults are set
-     */
-    @Test
-    public void testObjectCombinationDefaults() throws Exception {
-
-        ObjectCombination combination = new ObjectCombination();
-
-        assert combination.getActor().equals("*");
-        assert combination.getObject().equals("*");
-        assert combination.getTarget().equals("*");
-        assert combination.getProvider().equals("*");
-        assert combination.getTargetRequired() == false;
-
-    }
+  ObjectMapper mapper = new ObjectMapper();
+
+  /**
+   * Test read verb definition from json.
+   */
+  @Test
+  public void testReadVerbDefinitionJson() throws Exception {
+
+    VerbDefinition definition = mapper.readValue(VerbDefinitionTest.class.getResourceAsStream("/do.json"), VerbDefinition.class);
+
+    assert definition != null;
+    assert definition.getObjectType().equals("verb");
+    assert definition.getObjects().size() == 1;
+    assert definition.getObjects().get(0).getActor().equals("*");
+    assert definition.getObjects().get(0).getObject().equals("*");
+    assert definition.getObjects().get(0).getTarget().equals("*");
+    assert definition.getObjects().get(0).getProvider().equals("*");
+    assert definition.getObjects().get(0).getTemplates().getAdditionalProperties().size() == 1;
+  }
+
+  /**
+   * Test verb definition defaults are set.
+   */
+  @Test
+  public void testObjectCombinationDefaults() throws Exception {
+
+    ObjectCombination combination = new ObjectCombination();
+
+    assert combination.getActor().equals("*");
+    assert combination.getObject().equals("*");
+    assert combination.getTarget().equals("*");
+    assert combination.getProvider().equals("*");
+    assert combination.getTargetRequired() == false;
+
+  }
 
 }


[09/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/test/java/org/apache/streams/pojo/test/RFC3339UtilsTest.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/test/java/org/apache/streams/pojo/test/RFC3339UtilsTest.java b/streams-pojo/src/test/java/org/apache/streams/pojo/test/RFC3339UtilsTest.java
index b44e72a..bce756a 100644
--- a/streams-pojo/src/test/java/org/apache/streams/pojo/test/RFC3339UtilsTest.java
+++ b/streams-pojo/src/test/java/org/apache/streams/pojo/test/RFC3339UtilsTest.java
@@ -19,6 +19,7 @@
 package org.apache.streams.pojo.test;
 
 import org.apache.streams.data.util.RFC3339Utils;
+
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.junit.Test;
@@ -38,189 +39,190 @@ import static org.junit.Assert.fail;
  */
 public class RFC3339UtilsTest {
 
-    @Test
-    public void validUTC() {
-        DateTime parsed = parseUTC("2014-12-25T12:00:00Z");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(12)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-    }
-
-    @Test
-    public void validUTCSubSecond() {
-        DateTime parsed = parseUTC("2014-12-25T12:00:00.7Z");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(12)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-        assertThat(parsed.millisOfSecond().get(), is(equalTo(700)));
-    }
-
-    @Test
-    public void validUTCSubSecondMultiDigit() {
-        DateTime parsed = parseUTC("2014-12-25T12:00:00.7343Z");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(12)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-        assertThat(parsed.millisOfSecond().get(), is(equalTo(734)));
-    }
-
-    @Test
-    public void validEST() {
-        DateTime parsed = parseUTC("2014-12-25T12:00:00-05:00");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(17)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-    }
-
-    @Test
-    public void validESTSubSecond() {
-        DateTime parsed = parseUTC("2014-12-25T12:00:00.7-05:00");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(17)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-        assertThat(parsed.millisOfSecond().get(), is(equalTo(700)));
-    }
-
-    @Test
-    public void validESTSubSecondMultiDigit() {
-        DateTime parsed = parseUTC("2014-12-25T12:00:00.7343-05:00");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(17)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-        assertThat(parsed.millisOfSecond().get(), is(equalTo(734)));
-    }
-
-    @Test
-    public void validESTNoSeparator() {
-        DateTime parsed = parseUTC("2014-12-25T12:00:00-0500");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(17)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-    }
-
-    @Test
-    public void validESTSubSecondNoSeparator() {
-        DateTime parsed = parseUTC("2014-12-25T12:00:00.7-0500");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(17)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-        assertThat(parsed.millisOfSecond().get(), is(equalTo(700)));
-    }
-
-    @Test
-    public void validESTSubSecondMultiDigitNoSeparator() {
-        DateTime parsed = parseUTC("2014-12-25T12:00:00.7343-0500");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(17)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-        assertThat(parsed.millisOfSecond().get(), is(equalTo(734)));
-    }
-
-    @Test
-    public void validCET() {
-        DateTime parsed = parseUTC("2014-12-25T12:00:00+01:00");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(11)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-    }
-
-    @Test
-    public void validCETSubSecond() {
-        DateTime parsed = parseUTC("2014-12-25T12:00:00.7+01:00");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(11)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-        assertThat(parsed.millisOfSecond().get(), is(equalTo(700)));
-    }
-
-    @Test
-    public void validCETSubSecondMultidigit() {
-        DateTime parsed = parseUTC("2014-12-25T12:00:00.7343+01:00");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(11)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-        assertThat(parsed.millisOfSecond().get(), is(equalTo(734)));
-    }
-
-    @Test
-    public void validLong() {
-        DateTime parsed = parseUTC("1419505200734");
-        assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
-        assertThat(parsed.hourOfDay().get(), is(equalTo(11)));
-        assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
-        assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
-        assertThat(parsed.millisOfSecond().get(), is(equalTo(734)));
-    }
-
-    @Test
-    public void validFormatUTC() {
-        DateTime parsed = new DateTime(1419505200734L);
-        assertThat(format(parsed), is(equalTo("2014-12-25T11:00:00.734Z")));
-    }
-
-    @Test
-    public void validFormat() {
-        TimeZone cet = TimeZone.getTimeZone("CET");
-        DateTime parsed = new DateTime(1419505200734L);
-        assertThat(format(parsed, cet), is(equalTo("2014-12-25T12:00:00.734+0100")));
-    }
-
-    @Test
-    public void testParseVariousDateFormats() {
-        String date = "Thu April 24 04:43:10 -0500 2014";
-        DateTime expected = new DateTime(2014, 4, 24, 9, 43, 10, DateTimeZone.forOffsetHours(0));
-        testHelper(expected, date);
-        date = "2014/04/24 04:43:10";
-        expected = new DateTime(2014, 4, 24, 4, 43, 10, DateTimeZone.forOffsetHours(0));
-        testHelper(expected, date);
-        date = "2014-04-24T04:43:10Z";
-        testHelper(expected, date);
-        date = "04:43:10 2014/04/24";
-        testHelper(expected, date);
-        date = "4/24/2014 04:43:10";
-        testHelper(expected, date);
-        date = "04:43:10 4/24/2014";
-        testHelper(expected, date);
-        date = "04:43:10 2014-04-24";
-        testHelper(expected, date);
-        date = "4-24-2014 04:43:10";
-        testHelper(expected, date);
-        date = "04:43:10 4-24-2014";
-        testHelper(expected, date);
-        expected = new DateTime(2014, 4, 24, 0, 0, 0, DateTimeZone.forOffsetHours(0));
-        date = "24-4-2014";
-        testHelper(expected, date);
-        date = "2014-4-24";
-        testHelper(expected, date);
-        date = "2014/4/24";
-        testHelper(expected, date);
-        date = "2014/4/24 fesdfs";
-        try {
-            RFC3339Utils.parseToUTC(date);
-            fail("Should not have been able to parse : "+date);
-        } catch (Exception e) {
-        }
-    }
-
-    private void testHelper(DateTime expected, String dateString) {
-        DateTime parsedDate = RFC3339Utils.parseToUTC(dateString);
-        assertEquals("Failed to parse : "+dateString, expected, parsedDate);
-        String rfc3339String = RFC3339Utils.format(dateString);
-        String parsedRfc3339String = RFC3339Utils.format(parsedDate);
-        assertEquals("Parsed String should be equal.", parsedRfc3339String, rfc3339String);
-        DateTime convertedBack = RFC3339Utils.parseToUTC(parsedRfc3339String);
-        assertEquals(expected, convertedBack);
-    }
+  @Test
+  public void validUTC() {
+    DateTime parsed = parseUTC("2014-12-25T12:00:00Z");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(12)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+  }
+
+  @Test
+  public void validUTCSubSecond() {
+    DateTime parsed = parseUTC("2014-12-25T12:00:00.7Z");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(12)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+    assertThat(parsed.millisOfSecond().get(), is(equalTo(700)));
+  }
+
+  @Test
+  public void validUTCSubSecondMultiDigit() {
+    DateTime parsed = parseUTC("2014-12-25T12:00:00.7343Z");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(12)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+    assertThat(parsed.millisOfSecond().get(), is(equalTo(734)));
+  }
+
+  @Test
+  public void validEST() {
+    DateTime parsed = parseUTC("2014-12-25T12:00:00-05:00");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(17)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+  }
+
+  @Test
+  public void validESTSubSecond() {
+    DateTime parsed = parseUTC("2014-12-25T12:00:00.7-05:00");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(17)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+    assertThat(parsed.millisOfSecond().get(), is(equalTo(700)));
+  }
+
+  @Test
+  public void validESTSubSecondMultiDigit() {
+    DateTime parsed = parseUTC("2014-12-25T12:00:00.7343-05:00");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(17)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+    assertThat(parsed.millisOfSecond().get(), is(equalTo(734)));
+  }
+
+  @Test
+  public void validESTNoSeparator() {
+    DateTime parsed = parseUTC("2014-12-25T12:00:00-0500");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(17)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+  }
+
+  @Test
+  public void validESTSubSecondNoSeparator() {
+    DateTime parsed = parseUTC("2014-12-25T12:00:00.7-0500");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(17)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+    assertThat(parsed.millisOfSecond().get(), is(equalTo(700)));
+  }
+
+  @Test
+  public void validESTSubSecondMultiDigitNoSeparator() {
+    DateTime parsed = parseUTC("2014-12-25T12:00:00.7343-0500");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(17)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+    assertThat(parsed.millisOfSecond().get(), is(equalTo(734)));
+  }
+
+  @Test
+  public void validCET() {
+    DateTime parsed = parseUTC("2014-12-25T12:00:00+01:00");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(11)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+  }
+
+  @Test
+  public void validCETSubSecond() {
+    DateTime parsed = parseUTC("2014-12-25T12:00:00.7+01:00");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(11)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+    assertThat(parsed.millisOfSecond().get(), is(equalTo(700)));
+  }
+
+  @Test
+  public void validCETSubSecondMultidigit() {
+    DateTime parsed = parseUTC("2014-12-25T12:00:00.7343+01:00");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(11)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+    assertThat(parsed.millisOfSecond().get(), is(equalTo(734)));
+  }
+
+  @Test
+  public void validLong() {
+    DateTime parsed = parseUTC("1419505200734");
+    assertThat(parsed.minuteOfHour().get(), is(equalTo(0)));
+    assertThat(parsed.hourOfDay().get(), is(equalTo(11)));
+    assertThat(parsed.dayOfMonth().get(), is(equalTo(25)));
+    assertThat(parsed.monthOfYear().get(), is(equalTo(12)));
+    assertThat(parsed.millisOfSecond().get(), is(equalTo(734)));
+  }
+
+  @Test
+  public void validFormatUTC() {
+    DateTime parsed = new DateTime(1419505200734L);
+    assertThat(format(parsed), is(equalTo("2014-12-25T11:00:00.734Z")));
+  }
+
+  @Test
+  public void validFormat() {
+    TimeZone cet = TimeZone.getTimeZone("CET");
+    DateTime parsed = new DateTime(1419505200734L);
+    assertThat(format(parsed, cet), is(equalTo("2014-12-25T12:00:00.734+0100")));
+  }
+
+  @Test
+  public void testParseVariousDateFormats() {
+    String date = "Thu April 24 04:43:10 -0500 2014";
+    DateTime expected = new DateTime(2014, 4, 24, 9, 43, 10, DateTimeZone.forOffsetHours(0));
+    testHelper(expected, date);
+    date = "2014/04/24 04:43:10";
+    expected = new DateTime(2014, 4, 24, 4, 43, 10, DateTimeZone.forOffsetHours(0));
+    testHelper(expected, date);
+    date = "2014-04-24T04:43:10Z";
+    testHelper(expected, date);
+    date = "04:43:10 2014/04/24";
+    testHelper(expected, date);
+    date = "4/24/2014 04:43:10";
+    testHelper(expected, date);
+    date = "04:43:10 4/24/2014";
+    testHelper(expected, date);
+    date = "04:43:10 2014-04-24";
+    testHelper(expected, date);
+    date = "4-24-2014 04:43:10";
+    testHelper(expected, date);
+    date = "04:43:10 4-24-2014";
+    testHelper(expected, date);
+    expected = new DateTime(2014, 4, 24, 0, 0, 0, DateTimeZone.forOffsetHours(0));
+    date = "24-4-2014";
+    testHelper(expected, date);
+    date = "2014-4-24";
+    testHelper(expected, date);
+    date = "2014/4/24";
+    testHelper(expected, date);
+    date = "2014/4/24 fesdfs";
+    try {
+      RFC3339Utils.parseToUTC(date);
+      fail("Should not have been able to parse : " + date);
+    } catch (Exception ex) {
+      //
+    }
+  }
+
+  private void testHelper(DateTime expected, String dateString) {
+    DateTime parsedDate = RFC3339Utils.parseToUTC(dateString);
+    assertEquals("Failed to parse : " + dateString, expected, parsedDate);
+    String rfc3339String = RFC3339Utils.format(dateString);
+    String parsedRfc3339String = RFC3339Utils.format(parsedDate);
+    assertEquals("Parsed String should be equal.", parsedRfc3339String, rfc3339String);
+    DateTime convertedBack = RFC3339Utils.parseToUTC(parsedRfc3339String);
+    assertEquals(expected, convertedBack);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/GenericWebhookResource.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/GenericWebhookResource.java b/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/GenericWebhookResource.java
index ce4388e..a09abda 100644
--- a/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/GenericWebhookResource.java
+++ b/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/GenericWebhookResource.java
@@ -18,19 +18,26 @@
 
 package org.apache.streams.dropwizard;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Splitter;
-import com.google.common.collect.Queues;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProvider;
 import org.apache.streams.core.StreamsResultSet;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.util.ComponentUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Queues;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.math.BigInteger;
+import java.util.Queue;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.regex.Pattern;
 import javax.annotation.Resource;
 import javax.ws.rs.Consumes;
 import javax.ws.rs.POST;
@@ -40,17 +47,11 @@ import javax.ws.rs.core.Context;
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import java.math.BigInteger;
-import java.util.List;
-import java.util.Queue;
-import java.util.concurrent.ConcurrentLinkedQueue;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-import java.util.regex.Pattern;
 
 /**
  * GenericWebhookResource provides basic webhook connectivity.
  *
+ * <p/>
  * Add processors / persistWriters that read from "GenericWebhookResource" to
  * consume data posted to streams.
  */
@@ -60,205 +61,222 @@ import java.util.regex.Pattern;
 @Consumes(MediaType.APPLICATION_JSON)
 public class GenericWebhookResource implements StreamsProvider {
 
-    public static final String STREAMS_ID = "GenericWebhookResource";
+  public static final String STREAMS_ID = "GenericWebhookResource";
 
-    public GenericWebhookResource() {
-    }
-
-    private static final Logger log = LoggerFactory
-            .getLogger(GenericWebhookResource.class);
+  public GenericWebhookResource() {
+  }
 
-    private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static final Logger log = LoggerFactory
+      .getLogger(GenericWebhookResource.class);
 
-    protected Queue<StreamsDatum> providerQueue = new ConcurrentLinkedQueue<>();
+  private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    protected final ReadWriteLock lock = new ReentrantReadWriteLock();
+  protected Queue<StreamsDatum> providerQueue = new ConcurrentLinkedQueue<>();
 
-    private static Pattern newLinePattern = Pattern.compile("(\\r\\n?|\\n)", Pattern.MULTILINE);
+  protected final ReadWriteLock lock = new ReentrantReadWriteLock();
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  private static Pattern newLinePattern = Pattern.compile("(\\r\\n?|\\n)", Pattern.MULTILINE);
 
-    @POST
-    @Path("json")
-    public Response json(@Context HttpHeaders headers,
-                                  String body) {
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        ObjectNode response = mapper.createObjectNode();
-        int responseCode = Response.Status.BAD_REQUEST.getStatusCode();
+  /**
+   * push a String json datum into a stream.
+   * @param headers HttpHeaders
+   * @param body String json
+   * @return Response
+   */
+  @POST
+  @Path("json")
+  public Response json(@Context HttpHeaders headers,
+                       String body) {
 
-        try {
-            ObjectNode item = mapper.readValue(body, ObjectNode.class);
+    ObjectNode response = mapper.createObjectNode();
+    int responseCode = Response.Status.BAD_REQUEST.getStatusCode();
 
-            StreamsDatum datum = new StreamsDatum(body);
+    try {
+      ObjectNode item = mapper.readValue(body, ObjectNode.class);
 
-            lock.writeLock().lock();
-            ComponentUtils.offerUntilSuccess(datum, providerQueue);
-            lock.writeLock().unlock();
+      StreamsDatum datum = new StreamsDatum(body);
 
-            Boolean success = true;
+      lock.writeLock().lock();
+      ComponentUtils.offerUntilSuccess(datum, providerQueue);
+      lock.writeLock().unlock();
 
-            response.put("success", success);
+      Boolean success = true;
 
-            responseCode = Response.Status.OK.getStatusCode();
+      response.put("success", success);
 
-        } catch (Exception e) {
-            log.warn(e.toString(), e);
+      responseCode = Response.Status.OK.getStatusCode();
 
-            Boolean success = false;
+    } catch (Exception ex) {
+      log.warn(ex.toString(), ex);
 
-            response.put("success", success);
-            responseCode = Response.Status.BAD_REQUEST.getStatusCode();
+      Boolean success = false;
 
-        } finally {
-            return Response.status(responseCode).entity(response).build();
+      response.put("success", success);
+      responseCode = Response.Status.BAD_REQUEST.getStatusCode();
 
-        }
+    } finally {
+      return Response.status(responseCode).entity(response).build();
     }
+  }
 
-    @POST
-    @Path("json_new_line")
-    public Response json_new_line(@Context HttpHeaders headers,
-                                           String body) {
+  /**
+   * push multiple String json datums into a stream.
+   * @param headers HttpHeaders
+   * @param body String json
+   * @return Response
+   */
+  @POST
+  @Path("json_new_line")
+  public Response json_new_line(@Context HttpHeaders headers,
+                                String body) {
 
-        ObjectNode response = mapper.createObjectNode();
-        int responseCode = Response.Status.BAD_REQUEST.getStatusCode();
+    ObjectNode response = mapper.createObjectNode();
+    int responseCode = Response.Status.BAD_REQUEST.getStatusCode();
 
-        if (body.equalsIgnoreCase("{}")) {
+    if (body.equalsIgnoreCase("{}")) {
 
-            Boolean success = true;
+      Boolean success = true;
 
-            response.put("success", success);
-            responseCode = Response.Status.OK.getStatusCode();
-            return Response.status(responseCode).entity(response).build();
-        }
-
-        try {
+      response.put("success", success);
+      responseCode = Response.Status.OK.getStatusCode();
+      return Response.status(responseCode).entity(response).build();
+    }
 
-            for( String line : Splitter.on(newLinePattern).split(body)) {
-                ObjectNode item = mapper.readValue(line, ObjectNode.class);
+    try {
 
-                StreamsDatum datum = new StreamsDatum(item);
+      for ( String line : Splitter.on(newLinePattern).split(body)) {
+        ObjectNode item = mapper.readValue(line, ObjectNode.class);
 
-                lock.writeLock().lock();
-                ComponentUtils.offerUntilSuccess(datum, providerQueue);
-                lock.writeLock().unlock();
+        StreamsDatum datum = new StreamsDatum(item);
 
-            }
+        lock.writeLock().lock();
+        ComponentUtils.offerUntilSuccess(datum, providerQueue);
+        lock.writeLock().unlock();
 
-            Boolean success = true;
+      }
 
-            response.put("success", success);
-            responseCode = Response.Status.OK.getStatusCode();
+      Boolean success = true;
 
-        } catch (Exception e) {
-            log.warn(e.toString(), e);
+      response.put("success", success);
+      responseCode = Response.Status.OK.getStatusCode();
 
-            Boolean success = false;
+    } catch (Exception ex) {
+      log.warn(ex.toString(), ex);
 
-            response.put("success", success);
-            responseCode = Response.Status.BAD_REQUEST.getStatusCode();
+      Boolean success = false;
 
-        } finally {
-            return Response.status(responseCode).entity(response).build();
+      response.put("success", success);
+      responseCode = Response.Status.BAD_REQUEST.getStatusCode();
 
-        }
+    } finally {
+      return Response.status(responseCode).entity(response).build();
 
     }
 
-    @POST
-    @Path("json_meta")
-    public Response json_meta(@Context HttpHeaders headers,
-                                       String body) {
+  }
 
-        ObjectNode response = mapper.createObjectNode();
-        int responseCode = Response.Status.BAD_REQUEST.getStatusCode();
+  /**
+   * push multiple ObjectNode json datums into a stream.
+   * @param headers HttpHeaders
+   * @param body String json
+   * @return Response
+   */
+  @POST
+  @Path("json_meta")
+  public Response json_meta(@Context HttpHeaders headers,
+                            String body) {
 
-        if (body.equalsIgnoreCase("{}")) {
+    ObjectNode response = mapper.createObjectNode();
+    int responseCode = Response.Status.BAD_REQUEST.getStatusCode();
 
-            Boolean success = true;
+    if (body.equalsIgnoreCase("{}")) {
 
-            response.put("success", success);
-            responseCode = Response.Status.OK.getStatusCode();
+      Boolean success = true;
 
-            return Response.status(responseCode).entity(response).build();
-        }
+      response.put("success", success);
+      responseCode = Response.Status.OK.getStatusCode();
 
-        try {
+      return Response.status(responseCode).entity(response).build();
+    }
 
-            GenericWebhookData objectWrapper = mapper.readValue(body, GenericWebhookData.class);
+    try {
 
-            for( ObjectNode item : objectWrapper.getData()) {
+      GenericWebhookData objectWrapper = mapper.readValue(body, GenericWebhookData.class);
 
-                StreamsDatum datum = new StreamsDatum(item);
+      for ( ObjectNode item : objectWrapper.getData()) {
 
-                lock.writeLock().lock();
-                ComponentUtils.offerUntilSuccess(datum, providerQueue);
-                lock.writeLock().unlock();
-            }
+        StreamsDatum datum = new StreamsDatum(item);
 
-            Boolean success = true;
+        lock.writeLock().lock();
+        ComponentUtils.offerUntilSuccess(datum, providerQueue);
+        lock.writeLock().unlock();
+      }
 
-            response.put("success", success);
-            responseCode = Response.Status.OK.getStatusCode();
+      Boolean success = true;
 
-        } catch (Exception e) {
-            log.warn(e.toString(), e);
+      response.put("success", success);
+      responseCode = Response.Status.OK.getStatusCode();
 
-            Boolean success = false;
+    } catch (Exception ex) {
+      log.warn(ex.toString(), ex);
 
-            response.put("success", success);
-            responseCode = Response.Status.BAD_REQUEST.getStatusCode();
-        } finally {
-            return Response.status(responseCode).entity(response).build();
-        }
+      Boolean success = false;
 
+      response.put("success", success);
+      responseCode = Response.Status.BAD_REQUEST.getStatusCode();
+    } finally {
+      return Response.status(responseCode).entity(response).build();
     }
 
-    @Override
-    public void startStream() {
-        return;
-    }
+  }
 
-    @Override
-    public StreamsResultSet readCurrent() {
+  @Override
+  public void startStream() {
+    return;
+  }
 
-        StreamsResultSet current;
+  @Override
+  public StreamsResultSet readCurrent() {
 
-        lock.writeLock().lock();
-        current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(providerQueue));
-        providerQueue.clear();
-        lock.writeLock().unlock();
+    StreamsResultSet current;
 
-        return current;
+    lock.writeLock().lock();
+    current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(providerQueue));
+    providerQueue.clear();
+    lock.writeLock().unlock();
 
-    }
+    return current;
 
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return null;
-    }
+  }
 
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return null;
-    }
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return null;
+  }
 
-    @Override
-    public boolean isRunning() {
-        return true;
-    }
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return null;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public boolean isRunning() {
+    return true;
+  }
 
-    }
+  @Override
+  public void prepare(Object configurationObject) {
 
-    @Override
-    public void cleanUp() {
+  }
 
-    }
+  @Override
+  public void cleanUp() {
+
+  }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamDropwizardBuilder.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamDropwizardBuilder.java b/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamDropwizardBuilder.java
index 0fcc4eb..f13a41f 100644
--- a/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamDropwizardBuilder.java
+++ b/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamDropwizardBuilder.java
@@ -18,46 +18,47 @@
 
 package org.apache.streams.dropwizard;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamBuilder;
 import org.apache.streams.core.StreamsProvider;
 import org.apache.streams.local.LocalRuntimeConfiguration;
 import org.apache.streams.local.builders.LocalStreamBuilder;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import java.util.Map;
 
 /**
- * StreamDropwizardBuilder is currently a light wrapper around LocalStreamBuilder
+ * StreamDropwizardBuilder is currently a light wrapper around LocalStreamBuilder.
  *
+ * <p/>
  * It's a seperate class because they will almost certainly deviate going forward
  */
 public class StreamDropwizardBuilder extends LocalStreamBuilder implements StreamBuilder {
 
-    public StreamDropwizardBuilder() {
-        super();
-    }
+  public StreamDropwizardBuilder() {
+    super();
+  }
 
-    public StreamDropwizardBuilder(StreamsConfiguration streamConfig) {
-        super(new ObjectMapper().convertValue(streamConfig, LocalRuntimeConfiguration.class));
-    }
+  public StreamDropwizardBuilder(StreamsConfiguration streamConfig) {
+    super(new ObjectMapper().convertValue(streamConfig, LocalRuntimeConfiguration.class));
+  }
 
-    public StreamDropwizardBuilder(Map<String, Object> streamConfig) {
-        super(streamConfig);
-    }
+  public StreamDropwizardBuilder(Map<String, Object> streamConfig) {
+    super(streamConfig);
+  }
 
-    public StreamDropwizardBuilder(int maxQueueCapacity) {
-        super(maxQueueCapacity);
-    }
+  public StreamDropwizardBuilder(int maxQueueCapacity) {
+    super(maxQueueCapacity);
+  }
 
-    public StreamDropwizardBuilder(int maxQueueCapacity, Map<String, Object> streamConfig) {
-        super(maxQueueCapacity, streamConfig);
-    }
+  public StreamDropwizardBuilder(int maxQueueCapacity, Map<String, Object> streamConfig) {
+    super(maxQueueCapacity, streamConfig);
+  }
 
-    @Override
-    public StreamBuilder newPerpetualStream(String streamId, StreamsProvider provider) {
-        return super.newPerpetualStream(streamId, provider);
-    }
+  @Override
+  public StreamBuilder newPerpetualStream(String streamId, StreamsProvider provider) {
+    return super.newPerpetualStream(streamId, provider);
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamsApplication.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamsApplication.java b/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamsApplication.java
index fecf2f7..38d0f7b 100644
--- a/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamsApplication.java
+++ b/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamsApplication.java
@@ -18,175 +18,163 @@
 
 package org.apache.streams.dropwizard;
 
-import com.codahale.metrics.Counter;
+import org.apache.streams.config.StreamsConfiguration;
+import org.apache.streams.core.StreamBuilder;
+import org.apache.streams.core.StreamsProvider;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.codahale.metrics.MetricRegistry;
-import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.datatype.guava.GuavaModule;
 import com.fasterxml.jackson.module.afterburner.AfterburnerModule;
-import com.google.common.base.Strings;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import com.hubspot.dropwizard.guice.GuiceBundle;
-import com.sun.jersey.api.core.ResourceConfig;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigRenderOptions;
 import io.dropwizard.Application;
 import io.dropwizard.jackson.GuavaExtrasModule;
 import io.dropwizard.metrics.MetricsFactory;
 import io.dropwizard.setup.Bootstrap;
 import io.dropwizard.setup.Environment;
-import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.StreamBuilder;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsPersistWriter;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.core.StreamsProvider;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.local.builders.LocalStreamBuilder;
-import org.apache.streams.pojo.json.Activity;
-import org.joda.time.DateTime;
-import org.reflections.Reflections;
-import org.reflections.util.ConfigurationBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.IOException;
-import java.math.BigInteger;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Executor;
 import java.util.concurrent.Executors;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import com.google.inject.Inject;
-
-import javax.annotation.Resource;
-import javax.ws.rs.Path;
 
 /**
  * Entry point to a dropwizard streams application
  *
+ * <p/>
  * It will start up a stream in the local runtime, as well as bind any
  * StreamsProvider on the classpath with a @Resource annotation.
- *
  */
 public class StreamsApplication extends Application<StreamsDropwizardConfiguration> {
 
-    private static final Logger LOGGER = LoggerFactory
-			.getLogger(StreamsApplication.class);
-
-    protected static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static final Logger LOGGER = LoggerFactory
+      .getLogger(StreamsApplication.class);
 
-    protected StreamBuilder builder;
+  protected static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    private static StreamsConfiguration streamsConfiguration;
+  protected StreamBuilder builder;
 
-    // ConcurrentHashSet is preferable, but it's only in guava 15+
-    // spark 1.5.0 uses guava 14 so for the moment this is the workaround
-    // Set<StreamsProvider> resourceProviders = Sets.newConcurrentHashSet();
-    private Set<StreamsProvider> resourceProviders = Collections.newSetFromMap(new ConcurrentHashMap<StreamsProvider, Boolean>());
+  private static StreamsConfiguration streamsConfiguration;
 
-    private Executor executor = Executors.newSingleThreadExecutor();
+  // ConcurrentHashSet is preferable, but it's only in guava 15+
+  // spark 1.5.0 uses guava 14 so for the moment this is the workaround
+  // Set<StreamsProvider> resourceProviders = Sets.newConcurrentHashSet();
+  private Set<StreamsProvider> resourceProviders = Collections.newSetFromMap(new ConcurrentHashMap<StreamsProvider, Boolean>());
 
-    static {
-        mapper.registerModule(new AfterburnerModule());
-        mapper.registerModule(new GuavaModule());
-        mapper.registerModule(new GuavaExtrasModule());
-    }
+  private Executor executor = Executors.newSingleThreadExecutor();
 
-    @Override
-    public void initialize(Bootstrap<StreamsDropwizardConfiguration> bootstrap) {
+  static {
+    mapper.registerModule(new AfterburnerModule());
+    mapper.registerModule(new GuavaModule());
+    mapper.registerModule(new GuavaExtrasModule());
+  }
 
-        LOGGER.info(getClass().getPackage().getName());
+  @Override
+  public void initialize(Bootstrap<StreamsDropwizardConfiguration> bootstrap) {
 
-        GuiceBundle<StreamsDropwizardConfiguration> guiceBundle =
-                GuiceBundle.<StreamsDropwizardConfiguration>newBuilder()
-                .addModule(new StreamsDropwizardModule())
-                .setConfigClass(StreamsDropwizardConfiguration.class)
-                // override and add more packages to pick up custom Resources
-                .enableAutoConfig(getClass().getPackage().getName())
-                .build();
-        bootstrap.addBundle(guiceBundle);
+    LOGGER.info(getClass().getPackage().getName());
 
-    }
+    GuiceBundle<StreamsDropwizardConfiguration> guiceBundle =
+        GuiceBundle.<StreamsDropwizardConfiguration>newBuilder()
+            .addModule(new StreamsDropwizardModule())
+            .setConfigClass(StreamsDropwizardConfiguration.class)
+            // override and add more packages to pick up custom Resources
+            .enableAutoConfig(getClass().getPackage().getName())
+            .build();
+    bootstrap.addBundle(guiceBundle);
 
-    @Override
-    public void run(StreamsDropwizardConfiguration streamsDropwizardConfiguration, Environment environment) throws Exception {
+  }
 
-        executor = Executors.newSingleThreadExecutor();
+  @Override
+  public void run(StreamsDropwizardConfiguration streamsDropwizardConfiguration, Environment environment) throws Exception {
 
-        for( Class<?> resourceProviderClass : environment.jersey().getResourceConfig().getRootResourceClasses() ) {
-            StreamsProvider provider = (StreamsProvider)resourceProviderClass.newInstance();
-            if( StreamsProvider.class.isInstance(provider))
-                resourceProviders.add(provider);
-        }
+    executor = Executors.newSingleThreadExecutor();
 
-        MetricRegistry metrics = new MetricRegistry();
-        MetricsFactory mfac = streamsDropwizardConfiguration.getMetricsFactory();
-        mfac.configure(environment.lifecycle(), metrics);
+    for ( Class<?> resourceProviderClass : environment.jersey().getResourceConfig().getRootResourceClasses() ) {
+      StreamsProvider provider = (StreamsProvider)resourceProviderClass.newInstance();
+      if ( StreamsProvider.class.isInstance(provider)) {
+        resourceProviders.add(provider);
+      }
+    }
 
-        streamsConfiguration = mapper.convertValue(streamsDropwizardConfiguration, StreamsConfiguration.class);
+    MetricRegistry metrics = new MetricRegistry();
+    MetricsFactory mfac = streamsDropwizardConfiguration.getMetricsFactory();
+    mfac.configure(environment.lifecycle(), metrics);
 
-        builder = setup(streamsConfiguration, resourceProviders);
+    streamsConfiguration = mapper.convertValue(streamsDropwizardConfiguration, StreamsConfiguration.class);
 
-        executor.execute(new StreamsDropwizardRunner(builder, streamsConfiguration));
+    builder = setup(streamsConfiguration, resourceProviders);
 
-        // wait for streams to start up
-        Thread.sleep(10000);
+    executor.execute(new StreamsDropwizardRunner(builder, streamsConfiguration));
 
-        for (StreamsProvider resource : resourceProviders) {
-            environment.jersey().register(resource);
-            LOGGER.info("Added resource class: {}", resource);
-        }
+    // wait for streams to start up
+    Thread.sleep(10000);
 
+    for (StreamsProvider resource : resourceProviders) {
+      environment.jersey().register(resource);
+      LOGGER.info("Added resource class: {}", resource);
     }
 
-    public StreamBuilder setup(StreamsConfiguration streamsConfiguration, Set<StreamsProvider> resourceProviders) {
+  }
 
-        StreamBuilder builder = new StreamDropwizardBuilder(streamsConfiguration);
+  /**
+   * setup StreamBuilder.
+   * @param streamsConfiguration StreamsConfiguration
+   * @param resourceProviders Set of StreamsProvider
+   * @return StreamBuilder
+   */
+  public StreamBuilder setup(StreamsConfiguration streamsConfiguration, Set<StreamsProvider> resourceProviders) {
 
-        List<String> providers = new ArrayList<>();
-        for( StreamsProvider provider: resourceProviders) {
-            String providerId = provider.getClass().getSimpleName();
-            builder.newPerpetualStream(providerId, provider);
-            providers.add(providerId);
-        }
+    StreamBuilder builder = new StreamDropwizardBuilder(streamsConfiguration);
 
-        return builder;
+    List<String> providers = new ArrayList<>();
+    for ( StreamsProvider provider: resourceProviders) {
+      String providerId = provider.getClass().getSimpleName();
+      builder.newPerpetualStream(providerId, provider);
+      providers.add(providerId);
     }
 
-    private class StreamsDropwizardRunner implements Runnable {
-
-        private StreamsConfiguration streamsConfiguration;
-
-        private StreamBuilder builder;
+    return builder;
+  }
 
-        protected StreamsDropwizardRunner(StreamBuilder builder, StreamsConfiguration streamsConfiguration) {
-            this.streamsConfiguration = streamsConfiguration;
-            this.builder = builder;
-        }
+  private class StreamsDropwizardRunner implements Runnable {
 
-        @Override
-        public void run() {
+    private StreamsConfiguration streamsConfiguration;
 
-            builder.start();
+    private StreamBuilder builder;
 
-        }
+    protected StreamsDropwizardRunner(StreamBuilder builder, StreamsConfiguration streamsConfiguration) {
+      this.streamsConfiguration = streamsConfiguration;
+      this.builder = builder;
     }
 
+    @Override
+    public void run() {
 
-    public static void main(String[] args) throws Exception
-    {
-
-        new StreamsApplication().run(args);
+      builder.start();
 
     }
+  }
+
+  /**
+   * Run from console:
+   *
+   * <p/>
+   * java -jar uber.jar server ./configuration.yml
+   *
+   * @param args ["server", configuration.yml]
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    new StreamsApplication().run(args);
+
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamsDropwizardModule.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamsDropwizardModule.java b/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamsDropwizardModule.java
index 01682c0..9514caf 100644
--- a/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamsDropwizardModule.java
+++ b/streams-runtimes/streams-runtime-dropwizard/src/main/java/org/apache/streams/dropwizard/StreamsDropwizardModule.java
@@ -18,35 +18,31 @@
 
 package org.apache.streams.dropwizard;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.google.inject.AbstractModule;
-import com.google.inject.Provides;
-import com.google.inject.Singleton;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigRenderOptions;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.config.StreamsConfigurator;
 
-import java.io.IOException;
+import com.google.inject.AbstractModule;
+import com.google.inject.Provides;
+import com.google.inject.Singleton;
 
 /**
  * This class exists because dropwizard-guice requires at least
- * one module to run
+ * one module to run.
  *
+ * <p/>
  * Do not expect @Inject StreamsConfiguration to work at the moment.
  */
 public class StreamsDropwizardModule extends AbstractModule {
 
-    @Override
-    protected void configure() {
-        requestStaticInjection(StreamsConfiguration.class);
-    }
+  @Override
+  protected void configure() {
+    requestStaticInjection(StreamsConfiguration.class);
+  }
 
-    @Provides
-    @Singleton
-    public StreamsConfiguration providesStreamsConfiguration() {
-        return StreamsConfigurator.detectConfiguration();
-    }
+  @Provides
+  @Singleton
+  public StreamsConfiguration providesStreamsConfiguration() {
+    return StreamsConfigurator.detectConfiguration();
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-dropwizard/src/test/java/org/apache/streams/dropwizard/test/GenericWebhookResourceTest.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-dropwizard/src/test/java/org/apache/streams/dropwizard/test/GenericWebhookResourceTest.java b/streams-runtimes/streams-runtime-dropwizard/src/test/java/org/apache/streams/dropwizard/test/GenericWebhookResourceTest.java
index ffe9d62..a7251df 100644
--- a/streams-runtimes/streams-runtime-dropwizard/src/test/java/org/apache/streams/dropwizard/test/GenericWebhookResourceTest.java
+++ b/streams-runtimes/streams-runtime-dropwizard/src/test/java/org/apache/streams/dropwizard/test/GenericWebhookResourceTest.java
@@ -18,82 +18,70 @@
 
 package org.apache.streams.dropwizard.test;
 
-import com.fasterxml.jackson.core.JsonParseException;
+import org.apache.streams.dropwizard.GenericWebhookData;
+import org.apache.streams.dropwizard.GenericWebhookResource;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.collect.Lists;
-import com.google.common.io.Resources;
-import io.dropwizard.testing.junit.DropwizardAppRule;
 import io.dropwizard.testing.junit.ResourceTestRule;
-import org.apache.streams.dropwizard.GenericWebhookData;
-import org.apache.streams.dropwizard.GenericWebhookResource;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.joda.time.DateTime;
-import org.junit.After;
-import org.junit.AfterClass;
 import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Test;
 
-import javax.ws.rs.core.HttpHeaders;
-import javax.xml.ws.Response;
-
 import java.util.List;
 
-import static org.mockito.Mockito.*;
-
 /**
  * Tests {@link: org.apache.streams.dropwizard.GenericWebhookResource}
  */
 public class GenericWebhookResourceTest {
 
-    private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    private static final GenericWebhookResource genericWebhookResource = new GenericWebhookResource();
+  private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @ClassRule
-    public static final ResourceTestRule resources = ResourceTestRule.builder()
-            .addResource(genericWebhookResource)
-            .build();
+  private static final GenericWebhookResource genericWebhookResource = new GenericWebhookResource();
 
-    @Test
-    public void testPostJson() {
-        Assert.assertEquals(400, genericWebhookResource.json(null, "{").getStatus());
-        Assert.assertEquals(400, genericWebhookResource.json(null, "}").getStatus());
-        Assert.assertEquals(400, genericWebhookResource.json(null, "srg").getStatus());
-        Assert.assertEquals(400, genericWebhookResource.json(null, "123").getStatus());
-        Assert.assertEquals(200, genericWebhookResource.json(null, "{}").getStatus());
-        Assert.assertEquals(200, genericWebhookResource.json(null, "{\"valid\":\"true\"}").getStatus());
-    };
+  @ClassRule
+  public static final ResourceTestRule resources = ResourceTestRule.builder()
+      .addResource(genericWebhookResource)
+      .build();
 
-    @Test
-    public void testPostJsonNewLine() {
-        Assert.assertEquals(200, genericWebhookResource.json_new_line(null, "{}").getStatus());
-        Assert.assertEquals(400, genericWebhookResource.json_new_line(null, "notvalid").getStatus());
-        Assert.assertEquals(200, genericWebhookResource.json_new_line(null, "{\"valid\":\"true\"}").getStatus());
-        Assert.assertEquals(200, genericWebhookResource.json_new_line(null, "{\"valid\":\"true\"}\n{\"valid\":\"true\"}\r{\"valid\":\"true\"}").getStatus());
-    };
+  @Test
+  public void testPostJson() {
+    Assert.assertEquals(400, genericWebhookResource.json(null, "{").getStatus());
+    Assert.assertEquals(400, genericWebhookResource.json(null, "}").getStatus());
+    Assert.assertEquals(400, genericWebhookResource.json(null, "srg").getStatus());
+    Assert.assertEquals(400, genericWebhookResource.json(null, "123").getStatus());
+    Assert.assertEquals(200, genericWebhookResource.json(null, "{}").getStatus());
+    Assert.assertEquals(200, genericWebhookResource.json(null, "{\"valid\":\"true\"}").getStatus());
+  }
 
-    @Test
-    public void testPostJsonMeta() throws JsonProcessingException {
-        Assert.assertEquals(200, genericWebhookResource.json_meta(null, "{}").getStatus());
-        Assert.assertEquals(400, genericWebhookResource.json_meta(null, "notvalid").getStatus());
-        GenericWebhookData testPostJsonMeta = new GenericWebhookData()
-                .withHash("test")
-                .withDeliveredAt(DateTime.now())
-                .withCount(1)
-                .withHashType("type")
-                .withId("test");
-        List<ObjectNode> testPostJsonData = Lists.newArrayList();
-        testPostJsonData.add(mapper.createObjectNode().put("valid", "true"));
-        testPostJsonMeta.setData(testPostJsonData);
-        String testPostJsonEntity = mapper.writeValueAsString(testPostJsonMeta);
-        Assert.assertEquals(200, genericWebhookResource.json_meta(null, testPostJsonEntity).getStatus());
+  @Test
+  public void testPostJsonNewLine() {
+    Assert.assertEquals(200, genericWebhookResource.json_new_line(null, "{}").getStatus());
+    Assert.assertEquals(400, genericWebhookResource.json_new_line(null, "notvalid").getStatus());
+    Assert.assertEquals(200, genericWebhookResource.json_new_line(null, "{\"valid\":\"true\"}").getStatus());
+    Assert.assertEquals(200, genericWebhookResource.json_new_line(null, "{\"valid\":\"true\"}\n{\"valid\":\"true\"}\r{\"valid\":\"true\"}").getStatus());
+  }
 
-    };
+  @Test
+  public void testPostJsonMeta() throws JsonProcessingException {
+    Assert.assertEquals(200, genericWebhookResource.json_meta(null, "{}").getStatus());
+    Assert.assertEquals(400, genericWebhookResource.json_meta(null, "notvalid").getStatus());
+    GenericWebhookData testPostJsonMeta = new GenericWebhookData()
+        .withHash("test")
+        .withDeliveredAt(DateTime.now())
+        .withCount(1)
+        .withHashType("type")
+        .withId("test");
+    List<ObjectNode> testPostJsonData = Lists.newArrayList();
+    testPostJsonData.add(mapper.createObjectNode().put("valid", "true"));
+    testPostJsonMeta.setData(testPostJsonData);
+    String testPostJsonEntity = mapper.writeValueAsString(testPostJsonMeta);
+    Assert.assertEquals(200, genericWebhookResource.json_meta(null, testPostJsonEntity).getStatus());
 
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-dropwizard/src/test/java/org/apache/streams/dropwizard/test/StreamsApplicationIT.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-dropwizard/src/test/java/org/apache/streams/dropwizard/test/StreamsApplicationIT.java b/streams-runtimes/streams-runtime-dropwizard/src/test/java/org/apache/streams/dropwizard/test/StreamsApplicationIT.java
index 1ab0c2e..788a523 100644
--- a/streams-runtimes/streams-runtime-dropwizard/src/test/java/org/apache/streams/dropwizard/test/StreamsApplicationIT.java
+++ b/streams-runtimes/streams-runtime-dropwizard/src/test/java/org/apache/streams/dropwizard/test/StreamsApplicationIT.java
@@ -32,17 +32,17 @@ import java.net.URL;
  */
 public class StreamsApplicationIT {
 
-    @Before
-    public void setupTest() throws Exception {
-        String[] testArgs = Lists.newArrayList("server", "src/test/resources/configuration.yml").toArray(new String[2]);
-        TestStreamsApplication.main(testArgs);
-    }
+  @Before
+  public void setupTest() throws Exception {
+    String[] testArgs = Lists.newArrayList("server", "src/test/resources/configuration.yml").toArray(new String[2]);
+    TestStreamsApplication.main(testArgs);
+  }
 
-    @Test
-    public void testApplicationStarted() throws Exception {
+  @Test
+  public void testApplicationStarted() throws Exception {
 
-        final URL url = new URL("http://localhost:8003/admin/ping");
-        final String response = new BufferedReader(new InputStreamReader(url.openStream())).readLine();
-        Assert.assertEquals("pong", response);
-    }
+    final URL url = new URL("http://localhost:8003/admin/ping");
+    final String response = new BufferedReader(new InputStreamReader(url.openStream())).readLine();
+    Assert.assertEquals("pong", response);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/InvalidStreamException.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/InvalidStreamException.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/InvalidStreamException.java
index 3511b3d..c44a8e4 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/InvalidStreamException.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/InvalidStreamException.java
@@ -23,19 +23,19 @@ package org.apache.streams.local.builders;
  */
 public class InvalidStreamException extends RuntimeException {
 
-    public InvalidStreamException() {
-        super();
-    }
+  public InvalidStreamException() {
+    super();
+  }
 
-    public InvalidStreamException(String s) {
-        super(s);
-    }
+  public InvalidStreamException(String string) {
+    super(string);
+  }
 
-    public InvalidStreamException(String s, Throwable throwable) {
-        super(s, throwable);
-    }
+  public InvalidStreamException(String string, Throwable throwable) {
+    super(string, throwable);
+  }
 
-    public InvalidStreamException(Throwable throwable) {
-        super(throwable);
-    }
+  public InvalidStreamException(Throwable throwable) {
+    super(throwable);
+  }
 }


[39/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistWriter.java b/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistWriter.java
index 0a39461..b61a364 100644
--- a/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistWriter.java
+++ b/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistWriter.java
@@ -16,8 +16,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.amazon.kinesis;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.converter.TypeConverterUtil;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsPersistWriter;
+
 import com.amazonaws.ClientConfiguration;
 import com.amazonaws.Protocol;
 import com.amazonaws.auth.AWSCredentials;
@@ -30,11 +37,7 @@ import com.amazonaws.services.kinesis.model.PutRecordResult;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Strings;
 import com.typesafe.config.Config;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.converter.TypeConverterUtil;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsPersistWriter;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -48,87 +51,94 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
 /**
- * Created by sblackmon on 9/2/15.
+ * KinesisPersistWriter writes documents to kinesis.
  */
 public class KinesisPersistWriter implements StreamsPersistWriter {
 
-    public final static String STREAMS_ID = "KinesisPersistWriter";
+  public static final String STREAMS_ID = "KinesisPersistWriter";
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(KinesisPersistWriter.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(KinesisPersistWriter.class);
 
-    protected volatile Queue<StreamsDatum> persistQueue;
+  protected volatile Queue<StreamsDatum> persistQueue;
 
-    private ObjectMapper mapper = new ObjectMapper();
+  private ObjectMapper mapper = new ObjectMapper();
 
-    private KinesisWriterConfiguration config;
+  private KinesisWriterConfiguration config;
 
-    private List<String> streamName;
+  private List<String> streamName;
 
-    private ExecutorService executor;
+  private ExecutorService executor;
 
-    protected AmazonKinesisClient client;
-
-    public KinesisPersistWriter() {
-        Config config = StreamsConfigurator.config.getConfig("kinesis");
-        this.config = new ComponentConfigurator<>(KinesisWriterConfiguration.class).detectConfiguration(config);
-        this.persistQueue  = new ConcurrentLinkedQueue<StreamsDatum>();
-    }
+  protected AmazonKinesisClient client;
 
-    public KinesisPersistWriter(KinesisWriterConfiguration config) {
-        this.config = config;
-        this.persistQueue  = new ConcurrentLinkedQueue<StreamsDatum>();
-    }
+  /**
+   * KinesisPersistWriter constructor - resolves KinesisWriterConfiguration from JVM 'kinesis'.
+   */
+  public KinesisPersistWriter() {
+    Config config = StreamsConfigurator.config.getConfig("kinesis");
+    this.config = new ComponentConfigurator<>(KinesisWriterConfiguration.class).detectConfiguration(config);
+    this.persistQueue  = new ConcurrentLinkedQueue<StreamsDatum>();
+  }
 
-    public void setConfig(KinesisWriterConfiguration config) {
-        this.config = config;
-    }
+  /**
+   * KinesisPersistWriter constructor - uses provided KinesisWriterConfiguration.
+   */
+  public KinesisPersistWriter(KinesisWriterConfiguration config) {
+    this.config = config;
+    this.persistQueue  = new ConcurrentLinkedQueue<StreamsDatum>();
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public void setConfig(KinesisWriterConfiguration config) {
+    this.config = config;
+  }
 
-    @Override
-    public void write(StreamsDatum entry) {
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        String document = (String) TypeConverterUtil.getInstance().convert(entry.getDocument(), String.class);
+  @Override
+  public void write(StreamsDatum entry) {
 
-        PutRecordRequest putRecordRequest = new PutRecordRequest()
-                .withStreamName(config.getStream())
-                .withPartitionKey(entry.getId())
-                .withData(ByteBuffer.wrap(document.getBytes()));
+    String document = (String) TypeConverterUtil.getInstance().convert(entry.getDocument(), String.class);
 
-        PutRecordResult putRecordResult = client.putRecord(putRecordRequest);
+    PutRecordRequest putRecordRequest = new PutRecordRequest()
+        .withStreamName(config.getStream())
+        .withPartitionKey(entry.getId())
+        .withData(ByteBuffer.wrap(document.getBytes()));
 
-        entry.setSequenceid(new BigInteger(putRecordResult.getSequenceNumber()));
+    PutRecordResult putRecordResult = client.putRecord(putRecordRequest);
 
-        LOGGER.debug("Wrote {}", entry);
-    }
+    entry.setSequenceid(new BigInteger(putRecordResult.getSequenceNumber()));
 
-    @Override
-    public void prepare(Object configurationObject) {
-        // Connect to Kinesis
-        synchronized (this) {
-            // Create the credentials Object
-            AWSCredentials credentials = new BasicAWSCredentials(config.getKey(), config.getSecretKey());
+    LOGGER.debug("Wrote {}", entry);
+  }
 
-            ClientConfiguration clientConfig = new ClientConfiguration();
-            clientConfig.setProtocol(Protocol.valueOf(config.getProtocol().toString()));
+  @Override
+  public void prepare(Object configurationObject) {
+    // Connect to Kinesis
+    synchronized (this) {
+      // Create the credentials Object
+      AWSCredentials credentials = new BasicAWSCredentials(config.getKey(), config.getSecretKey());
 
-            this.client = new AmazonKinesisClient(credentials, clientConfig);
-            if (!Strings.isNullOrEmpty(config.getRegion()))
-                this.client.setRegion(Region.getRegion(Regions.fromName(config.getRegion())));
-        }
-        executor = Executors.newSingleThreadExecutor();
+      ClientConfiguration clientConfig = new ClientConfiguration();
+      clientConfig.setProtocol(Protocol.valueOf(config.getProtocol().toString()));
 
+      this.client = new AmazonKinesisClient(credentials, clientConfig);
+      if (!Strings.isNullOrEmpty(config.getRegion())) {
+        this.client.setRegion(Region.getRegion(Regions.fromName(config.getRegion())));
+      }
     }
+    executor = Executors.newSingleThreadExecutor();
+
+  }
 
-    @Override
-    public void cleanUp() {
-        try {
-            executor.awaitTermination(5, TimeUnit.SECONDS);
-        } catch (InterruptedException e) {
-            LOGGER.debug("Interrupted! ", e);
-        }
+  @Override
+  public void cleanUp() {
+    try {
+      executor.awaitTermination(5, TimeUnit.SECONDS);
+    } catch (InterruptedException ex) {
+      LOGGER.debug("Interrupted! ", ex);
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3ObjectInputStreamWrapper.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3ObjectInputStreamWrapper.java b/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3ObjectInputStreamWrapper.java
index c13314d..f34782a 100644
--- a/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3ObjectInputStreamWrapper.java
+++ b/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3ObjectInputStreamWrapper.java
@@ -15,10 +15,12 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.s3;
 
 import com.amazonaws.services.s3.model.S3Object;
 import com.amazonaws.services.s3.model.S3ObjectInputStream;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -34,125 +36,129 @@ import java.io.InputStream;
  * and transfer the entire file. If you are only reading the first 50 lines of a 5,000,000 line file
  * this becomes problematic.
  *
+ * <p/>
  * This class operates as a wrapper to fix the aforementioned nuances.
  *
+ * <p/>
  * Reference:
  * http://stackoverflow.com/questions/17782937/connectionpooltimeoutexception-when-iterating-objects-in-s3
  */
-public class S3ObjectInputStreamWrapper extends InputStream
-{
-    private final static Logger LOGGER = LoggerFactory.getLogger(S3ObjectInputStreamWrapper.class);
-
-    private final S3Object s3Object;
-    private final S3ObjectInputStream is;
-    private boolean isClosed = false;
-
-    /**
-     * Create an input stream safely from
-     * @param s3Object
-     */
-    public S3ObjectInputStreamWrapper(S3Object s3Object) {
-        this.s3Object = s3Object;
-        this.is = this.s3Object.getObjectContent();
-    }
-
-    public int hashCode() {
-        return this.is.hashCode();
-    }
-
-    public boolean equals(Object obj) {
-        return this.is.equals(obj);
-    }
-
-    public String toString() {
-        return this.is.toString();
-    }
-
-    public int read() throws IOException {
-        return this.is.read();
-    }
-
-    public int read(byte[] b) throws IOException {
-        return this.is.read(b);
-    }
-
-    public int read(byte[] b, int off, int len) throws IOException {
-        return this.is.read(b, off, len);
-    }
-
-    public long skip(long n) throws IOException {
-        return this.is.skip(n);
-    }
-
-    public int available() throws IOException {
-        return this.is.available();
-    }
-
-    public boolean markSupported() {
-        return this.is.markSupported();
-    }
-
-    public synchronized void mark(int readlimit) {
-        this.is.mark(readlimit);
-    }
-
-    public synchronized void reset() throws IOException {
-        this.is.reset();
-    }
-
-    public void close() throws IOException {
-        ensureEverythingIsReleased();
-    }
+public class S3ObjectInputStreamWrapper extends InputStream {
 
-    public void ensureEverythingIsReleased() {
-        if(this.isClosed)
-            return;
-
-
-        try {
-            // ensure that the S3 Object is closed properly.
-            this.s3Object.close();
-        } catch(Throwable e) {
-            LOGGER.warn("Problem Closing the S3Object[{}]: {}", s3Object.getKey(), e.getMessage());
-        }
-
-
-        try {
-            // Abort the stream
-            this.is.abort();
-        }
-        catch(Throwable e) {
-            LOGGER.warn("Problem Aborting S3Object[{}]: {}", s3Object.getKey(), e.getMessage());
-        }
-
-        // close the input Stream Safely
-        closeSafely(this.is);
-
-        // This corrects the issue with Open HTTP connections
-        closeSafely(this.s3Object);
-        this.isClosed = true;
-    }
-
-    private static void closeSafely(Closeable is) {
-        try {
-            if(is != null)
-                is.close();
-        } catch(Exception e) {
-            e.printStackTrace();
-            LOGGER.warn("S3InputStreamWrapper: Issue Closing Closeable - {}", e.getMessage());
-        }
-    }
+  private static final Logger LOGGER = LoggerFactory.getLogger(S3ObjectInputStreamWrapper.class);
+
+  private final S3Object s3Object;
+  private final S3ObjectInputStream is;
+  private boolean isClosed = false;
 
-    protected void finalize( ) throws Throwable
-    {
-        try {
-            // If there is an accidental leak where the user did not close, call this on the classes destructor
-            ensureEverythingIsReleased();
-            super.finalize();
-        } catch(Exception e) {
-            // this should never be called, just being very cautious
-            LOGGER.warn("S3InputStreamWrapper: Issue Releasing Connections on Finalize - {}", e.getMessage());
-        }
+  /**
+   * Create an input stream safely.
+   * @param s3Object s3Object
+   */
+  public S3ObjectInputStreamWrapper(S3Object s3Object) {
+    this.s3Object = s3Object;
+    this.is = this.s3Object.getObjectContent();
+  }
+
+  public int hashCode() {
+    return this.is.hashCode();
+  }
+
+  public boolean equals(Object obj) {
+    return this.is.equals(obj);
+  }
+
+  public String toString() {
+    return this.is.toString();
+  }
+
+  public int read() throws IOException {
+    return this.is.read();
+  }
+
+  public int read(byte[] byt) throws IOException {
+    return this.is.read(byt);
+  }
+
+  public int read(byte[] byt, int off, int len) throws IOException {
+    return this.is.read(byt, off, len);
+  }
+
+  public long skip(long skip) throws IOException {
+    return this.is.skip(skip);
+  }
+
+  public int available() throws IOException {
+    return this.is.available();
+  }
+
+  public boolean markSupported() {
+    return this.is.markSupported();
+  }
+
+  public synchronized void mark(int readlimit) {
+    this.is.mark(readlimit);
+  }
+
+  public synchronized void reset() throws IOException {
+    this.is.reset();
+  }
+
+  public void close() throws IOException {
+    ensureEverythingIsReleased();
+  }
+
+  /**
+   * ensureEverythingIsReleased as part of close process.
+   */
+  public void ensureEverythingIsReleased() {
+    if (this.isClosed) {
+      return;
+    }
+
+    try {
+      // ensure that the S3 Object is closed properly.
+      this.s3Object.close();
+    } catch (Throwable ex) {
+      LOGGER.warn("Problem Closing the S3Object[{}]: {}", s3Object.getKey(), ex.getMessage());
+    }
+
+
+    try {
+      // Abort the stream
+      this.is.abort();
+    } catch (Throwable ex) {
+      LOGGER.warn("Problem Aborting S3Object[{}]: {}", s3Object.getKey(), ex.getMessage());
+    }
+
+    // close the input Stream Safely
+    closeSafely(this.is);
+
+    // This corrects the issue with Open HTTP connections
+    closeSafely(this.s3Object);
+    this.isClosed = true;
+  }
+
+  private static void closeSafely(Closeable is) {
+    try {
+      if (is != null) {
+        is.close();
+      }
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      LOGGER.warn("S3InputStreamWrapper: Issue Closing Closeable - {}", ex.getMessage());
+    }
+  }
+
+  protected void finalize() throws Throwable {
+    try {
+      // If there is an accidental leak where the user did not close, call this on the classes destructor
+      ensureEverythingIsReleased();
+      super.finalize();
+    } catch (Exception ex) {
+      // this should never be called, just being very cautious
+      LOGGER.warn("S3InputStreamWrapper: Issue Releasing Connections on Finalize - {}", ex.getMessage());
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3OutputStreamWrapper.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3OutputStreamWrapper.java b/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3OutputStreamWrapper.java
index 08fc774..e8ca0c7 100644
--- a/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3OutputStreamWrapper.java
+++ b/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3OutputStreamWrapper.java
@@ -15,17 +15,23 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.s3;
 
 import com.amazonaws.services.s3.AmazonS3Client;
 import com.amazonaws.services.s3.model.ObjectMetadata;
 import com.amazonaws.services.s3.transfer.TransferManager;
 import com.amazonaws.services.s3.transfer.Upload;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.*;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.util.Map;
 
 /**
@@ -33,112 +39,109 @@ import java.util.Map;
  * in memory ByteArrayOutPutStream before it is finally written to Amazon S3. The size the file is allowed to become
  * is directly controlled by the S3PersistWriter.
  */
-public class S3OutputStreamWrapper extends OutputStream
-{
-    private static final Logger LOGGER = LoggerFactory.getLogger(S3OutputStreamWrapper.class);
-
-    private final AmazonS3Client amazonS3Client;
-    private final String bucketName;
-    private final String path;
-    private final String fileName;
-    private ByteArrayOutputStream outputStream;
-    private final Map<String, String> metaData;
-    private boolean isClosed = false;
-
-    /**
-     * Create an OutputStream Wrapper
-     * @param amazonS3Client
-     * The Amazon S3 Client which will be handling the file
-     * @param bucketName
-     * The Bucket Name you are wishing to write to.
-     * @param path
-     * The path where the object will live
-     * @param fileName
-     * The fileName you ware wishing to write.
-     * @param metaData
-     * Any meta data that is to be written along with the object
-     * @throws IOException
-     * If there is an issue creating the stream, this
-     */
-    public S3OutputStreamWrapper(AmazonS3Client amazonS3Client, String bucketName, String path, String fileName, Map<String, String> metaData) throws IOException {
-        this.amazonS3Client = amazonS3Client;
-        this.bucketName = bucketName;
-        this.path = path;
-        this.fileName = fileName;
-        this.metaData = metaData;
-        this.outputStream = new ByteArrayOutputStream();
+public class S3OutputStreamWrapper extends OutputStream {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(S3OutputStreamWrapper.class);
+
+  private final AmazonS3Client amazonS3Client;
+  private final String bucketName;
+  private final String path;
+  private final String fileName;
+  private ByteArrayOutputStream outputStream;
+  private final Map<String, String> metaData;
+  private boolean isClosed = false;
+
+  /**
+   * Create an OutputStream Wrapper
+   * @param amazonS3Client
+   * The Amazon S3 Client which will be handling the file
+   * @param bucketName
+   * The Bucket Name you are wishing to write to.
+   * @param path
+   * The path where the object will live
+   * @param fileName
+   * The fileName you are wishing to write.
+   * @param metaData
+   * Any meta data that is to be written along with the object
+   * @throws IOException
+   * If there is an issue creating the stream, this exception is thrown.
+   */
+  public S3OutputStreamWrapper(AmazonS3Client amazonS3Client, String bucketName, String path, String fileName, Map<String, String> metaData) throws IOException {
+    this.amazonS3Client = amazonS3Client;
+    this.bucketName = bucketName;
+    this.path = path;
+    this.fileName = fileName;
+    this.metaData = metaData;
+    this.outputStream = new ByteArrayOutputStream();
+  }
+
+  public void write(int byt) throws IOException {
+    this.outputStream.write(byt);
+  }
+
+  public void write(byte[] byt) throws IOException {
+    this.outputStream.write(byt);
+  }
+
+  public void write(byte[] byt, int off, int len) throws IOException {
+    this.outputStream.write(byt, off, len);
+  }
+
+  public void flush() throws IOException {
+    this.outputStream.flush();
+  }
+
+  /**
+   * Whenever the output stream is closed we are going to kick the ByteArrayOutputStream off to Amazon S3.
+   * @throws IOException
+   * Exception thrown from the FileOutputStream
+   */
+  public void close() throws IOException {
+    if (!isClosed) {
+      try {
+        this.addFile();
+        this.outputStream.close();
+        this.outputStream = null;
+      } catch (Exception ex) {
+        ex.printStackTrace();
+        LOGGER.warn("There was an error adding the temporaryFile to S3");
+      } finally {
+        // we are done here.
+        this.isClosed = true;
+      }
     }
+  }
 
-    public void write(int b) throws IOException {
-        this.outputStream.write(b);
-    }
+  private void addFile() throws Exception {
 
-    public void write(byte[] b) throws IOException {
-        this.outputStream.write(b);
-    }
+    InputStream is = new ByteArrayInputStream(this.outputStream.toByteArray());
+    int contentLength = outputStream.size();
 
-    public void write(byte[] b, int off, int len) throws IOException {
-        this.outputStream.write(b, off, len);
-    }
+    TransferManager transferManager = new TransferManager(amazonS3Client);
+    ObjectMetadata metadata = new ObjectMetadata();
+    metadata.setExpirationTime(DateTime.now().plusDays(365 * 3).toDate());
+    metadata.setContentLength(contentLength);
 
-    public void flush() throws IOException {
-        this.outputStream.flush();
-    }
+    metadata.addUserMetadata("writer", "org.apache.streams");
 
-    /**
-     * Whenever the output stream is closed we are going to kick the ByteArrayOutputStream off to Amazon S3.
-     * @throws IOException
-     * Exception thrown from the FileOutputStream
-     */
-    public void close() throws IOException {
-        if(!isClosed)
-        {
-            try
-            {
-                this.addFile();
-                this.outputStream.close();
-                this.outputStream = null;
-            }
-            catch(Exception e) {
-                e.printStackTrace();
-                LOGGER.warn("There was an error adding the temporaryFile to S3");
-            }
-            finally {
-                // we are done here.
-                this.isClosed = true;
-            }
-        }
+    for (String s : metaData.keySet()) {
+      metadata.addUserMetadata(s, metaData.get(s));
     }
 
-    private void addFile() throws Exception {
-
-        InputStream is = new ByteArrayInputStream(this.outputStream.toByteArray());
-        int contentLength = outputStream.size();
+    String fileNameToWrite = path + fileName;
+    Upload upload = transferManager.upload(bucketName, fileNameToWrite, is, metadata);
+    try {
+      upload.waitForUploadResult();
 
-        TransferManager transferManager = new TransferManager(amazonS3Client);
-        ObjectMetadata metadata = new ObjectMetadata();
-        metadata.setExpirationTime(DateTime.now().plusDays(365*3).toDate());
-        metadata.setContentLength(contentLength);
-
-        metadata.addUserMetadata("writer", "org.apache.streams");
-
-        for(String s : metaData.keySet())
-            metadata.addUserMetadata(s, metaData.get(s));
-
-        String fileNameToWrite = path + fileName;
-        Upload upload = transferManager.upload(bucketName, fileNameToWrite, is, metadata);
-        try {
-            upload.waitForUploadResult();
-
-            is.close();
-            transferManager.shutdownNow(false);
-            LOGGER.info("S3 File Close[{} kb] - {}", contentLength / 1024, path + fileName);
-        } catch (Exception e) {
-            // No Op
-        }
+      is.close();
+      transferManager.shutdownNow(false);
+      LOGGER.info("S3 File Close[{} kb] - {}", contentLength / 1024, path + fileName);
+    } catch (Exception ignored) {
+      LOGGER.trace("Ignoring", ignored);
+    }
 
 
-    }
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistReader.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistReader.java b/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistReader.java
index 702df71..753b439 100644
--- a/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistReader.java
+++ b/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistReader.java
@@ -15,8 +15,16 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.s3;
 
+import org.apache.streams.converter.LineReadWriteUtil;
+import org.apache.streams.core.DatumStatusCountable;
+import org.apache.streams.core.DatumStatusCounter;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsPersistReader;
+import org.apache.streams.core.StreamsResultSet;
+
 import com.amazonaws.ClientConfiguration;
 import com.amazonaws.Protocol;
 import com.amazonaws.auth.AWSCredentials;
@@ -31,12 +39,7 @@ import com.amazonaws.services.s3.model.S3ObjectSummary;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Strings;
 import com.google.common.collect.Queues;
-import org.apache.streams.converter.LineReadWriteUtil;
-import org.apache.streams.core.DatumStatusCountable;
-import org.apache.streams.core.DatumStatusCounter;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsPersistReader;
-import org.apache.streams.core.StreamsResultSet;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -50,163 +53,168 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.LinkedBlockingQueue;
 
+/**
+ * S3PersistReader reads documents from S3.
+ */
 public class S3PersistReader implements StreamsPersistReader, DatumStatusCountable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(S3PersistReader.class);
-    public final static String STREAMS_ID = "S3PersistReader";
-    protected final static char DELIMITER = '\t';
-
-    private S3ReaderConfiguration s3ReaderConfiguration;
-    private AmazonS3Client amazonS3Client;
-    private ObjectMapper mapper = new ObjectMapper();
-    protected LineReadWriteUtil lineReaderUtil;
-    private Collection<String> files;
-    private ExecutorService executor;
-    protected volatile Queue<StreamsDatum> persistQueue;
-
-    protected DatumStatusCounter countersTotal = new DatumStatusCounter();
-    protected DatumStatusCounter countersCurrent = new DatumStatusCounter();
-    private Future<?> task;
-
-    public AmazonS3Client getAmazonS3Client() {
-        return this.amazonS3Client;
-    }
-
-    public S3ReaderConfiguration getS3ReaderConfiguration() {
-        return this.s3ReaderConfiguration;
-    }
-
-    public String getBucketName() {
-        return this.s3ReaderConfiguration.getBucket();
-    }
-
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return null;
-    }
-
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return null;
-    }
-
-    @Override
-    public boolean isRunning() {
-        return !task.isDone() && !task.isCancelled();
-    }
-
-    public DatumStatusCounter getDatumStatusCounter() {
-        return countersTotal;
+  private static final Logger LOGGER = LoggerFactory.getLogger(S3PersistReader.class);
+  public static final String STREAMS_ID = "S3PersistReader";
+  protected static final char DELIMITER = '\t';
+
+  private S3ReaderConfiguration s3ReaderConfiguration;
+  private AmazonS3Client amazonS3Client;
+  private ObjectMapper mapper = new ObjectMapper();
+  protected LineReadWriteUtil lineReaderUtil;
+  private Collection<String> files;
+  private ExecutorService executor;
+  protected volatile Queue<StreamsDatum> persistQueue;
+
+  protected DatumStatusCounter countersTotal = new DatumStatusCounter();
+  protected DatumStatusCounter countersCurrent = new DatumStatusCounter();
+  private Future<?> task;
+
+  public AmazonS3Client getAmazonS3Client() {
+    return this.amazonS3Client;
+  }
+
+  public S3ReaderConfiguration getS3ReaderConfiguration() {
+    return this.s3ReaderConfiguration;
+  }
+
+  public String getBucketName() {
+    return this.s3ReaderConfiguration.getBucket();
+  }
+
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return null;
+  }
+
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return null;
+  }
+
+  @Override
+  public boolean isRunning() {
+    return !task.isDone() && !task.isCancelled();
+  }
+
+  public DatumStatusCounter getDatumStatusCounter() {
+    return countersTotal;
+  }
+
+  public Collection<String> getFiles() {
+    return this.files;
+  }
+
+  public S3PersistReader(S3ReaderConfiguration s3ReaderConfiguration) {
+    this.s3ReaderConfiguration = s3ReaderConfiguration;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+
+    lineReaderUtil = LineReadWriteUtil.getInstance(s3ReaderConfiguration);
+    // Connect to S3
+    synchronized (this) {
+      // Create the credentials Object
+      AWSCredentials credentials = new BasicAWSCredentials(s3ReaderConfiguration.getKey(), s3ReaderConfiguration.getSecretKey());
+
+      ClientConfiguration clientConfig = new ClientConfiguration();
+      clientConfig.setProtocol(Protocol.valueOf(s3ReaderConfiguration.getProtocol().toString()));
+
+      // We do not want path style access
+      S3ClientOptions clientOptions = new S3ClientOptions();
+      clientOptions.setPathStyleAccess(false);
+
+      this.amazonS3Client = new AmazonS3Client(credentials, clientConfig);
+      if ( !Strings.isNullOrEmpty(s3ReaderConfiguration.getRegion())) {
+        this.amazonS3Client.setRegion(Region.getRegion(Regions.fromName(s3ReaderConfiguration.getRegion())));
+      }
+      this.amazonS3Client.setS3ClientOptions(clientOptions);
     }
 
-    public Collection<String> getFiles() {
-        return this.files;
-    }
+    final ListObjectsRequest request = new ListObjectsRequest()
+        .withBucketName(this.s3ReaderConfiguration.getBucket())
+        .withPrefix(s3ReaderConfiguration.getReaderPath())
+        .withMaxKeys(500);
+
+
+    ObjectListing listing = this.amazonS3Client.listObjects(request);
+
+    this.files = new ArrayList<String>();
+
+    /**
+     * If you can list files that are in this path, then you must be dealing with a directory
+     * if you cannot list files that are in this path, then you are most likely dealing with
+     * a simple file.
+     */
+    boolean hasCommonPrefixes = listing.getCommonPrefixes().size() > 0 ? true : false;
+    boolean hasObjectSummaries = listing.getObjectSummaries().size() > 0 ? true : false;
+
+    if (hasCommonPrefixes || hasObjectSummaries) {
+      // Handle the 'directory' use case
+      do {
+        if (hasCommonPrefixes) {
+          for (String file : listing.getCommonPrefixes()) {
+            this.files.add(file);
+          }
+        } else {
+          for (final S3ObjectSummary objectSummary : listing.getObjectSummaries()) {
+            this.files.add(objectSummary.getKey());
+          }
+        }
 
-    public S3PersistReader(S3ReaderConfiguration s3ReaderConfiguration) {
-        this.s3ReaderConfiguration = s3ReaderConfiguration;
+        // get the next batch.
+        listing = this.amazonS3Client.listNextBatchOfObjects(listing);
+      }
+      while (listing.isTruncated());
+    } else {
+      // handle the single file use-case
+      this.files.add(s3ReaderConfiguration.getReaderPath());
     }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
+    if (this.files.size() <= 0) {
+      LOGGER.error("There are no files to read");
     }
 
-    public void prepare(Object configurationObject) {
-
-        lineReaderUtil = LineReadWriteUtil.getInstance(s3ReaderConfiguration);
-        // Connect to S3
-        synchronized (this)
-        {
-            // Create the credentials Object
-            AWSCredentials credentials = new BasicAWSCredentials(s3ReaderConfiguration.getKey(), s3ReaderConfiguration.getSecretKey());
-
-            ClientConfiguration clientConfig = new ClientConfiguration();
-            clientConfig.setProtocol(Protocol.valueOf(s3ReaderConfiguration.getProtocol().toString()));
-
-            // We do not want path style access
-            S3ClientOptions clientOptions = new S3ClientOptions();
-            clientOptions.setPathStyleAccess(false);
+    this.persistQueue = Queues.synchronizedQueue(new LinkedBlockingQueue<StreamsDatum>(10000));
+    this.executor = Executors.newSingleThreadExecutor();
+  }
 
-            this.amazonS3Client = new AmazonS3Client(credentials, clientConfig);
-            if( !Strings.isNullOrEmpty(s3ReaderConfiguration.getRegion()))
-                this.amazonS3Client.setRegion(Region.getRegion(Regions.fromName(s3ReaderConfiguration.getRegion())));
-            this.amazonS3Client.setS3ClientOptions(clientOptions);
-        }
-
-        final ListObjectsRequest request = new ListObjectsRequest()
-                .withBucketName(this.s3ReaderConfiguration.getBucket())
-                .withPrefix(s3ReaderConfiguration.getReaderPath())
-                .withMaxKeys(500);
-
-
-        ObjectListing listing = this.amazonS3Client.listObjects(request);
-
-        this.files = new ArrayList<String>();
-
-        /**
-         * If you can list files that are in this path, then you must be dealing with a directory
-         * if you cannot list files that are in this path, then you are most likely dealing with
-         * a simple file.
-         */
-        boolean hasCommonPrefixes = listing.getCommonPrefixes().size() > 0 ? true : false;
-        boolean hasObjectSummaries = listing.getObjectSummaries().size() > 0 ? true : false;
-
-        if(hasCommonPrefixes || hasObjectSummaries) {
-            // Handle the 'directory' use case
-            do
-            {
-                if(hasCommonPrefixes) {
-                    for (String file : listing.getCommonPrefixes()) {
-                        this.files.add(file);
-                    }
-                } else {
-                    for(final S3ObjectSummary objectSummary : listing.getObjectSummaries()) {
-                        this.files.add(objectSummary.getKey());
-                    }
-                }
-
-                // get the next batch.
-                listing = this.amazonS3Client.listNextBatchOfObjects(listing);
-            } while (listing.isTruncated());
-        }
-        else {
-            // handle the single file use-case
-            this.files.add(s3ReaderConfiguration.getReaderPath());
-        }
-
-        if(this.files.size() <= 0)
-            LOGGER.error("There are no files to read");
-
-        this.persistQueue = Queues.synchronizedQueue(new LinkedBlockingQueue<StreamsDatum>(10000));
-        this.executor = Executors.newSingleThreadExecutor();
-    }
-
-    public void cleanUp() {
-        // no Op
-    }
+  public void cleanUp() {
+    // no Op
+  }
 
-    public StreamsResultSet readAll() {
-        startStream();
-        return new StreamsResultSet(persistQueue);
-    }
+  public StreamsResultSet readAll() {
+    startStream();
+    return new StreamsResultSet(persistQueue);
+  }
 
-    public void startStream() {
-        LOGGER.debug("startStream");
-        task = executor.submit(new S3PersistReaderTask(this));
-    }
+  public void startStream() {
+    LOGGER.debug("startStream");
+    task = executor.submit(new S3PersistReaderTask(this));
+  }
 
-    public StreamsResultSet readCurrent() {
+  @Override
+  public StreamsResultSet readCurrent() {
 
-        StreamsResultSet current;
+    StreamsResultSet current;
 
-        synchronized( S3PersistReader.class ) {
-            current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(persistQueue));
-            current.setCounter(new DatumStatusCounter());
-            current.getCounter().add(countersCurrent);
-            countersTotal.add(countersCurrent);
-            countersCurrent = new DatumStatusCounter();
-            persistQueue.clear();
-        }
-        return current;
+    synchronized ( S3PersistReader.class ) {
+      current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(persistQueue));
+      current.setCounter(new DatumStatusCounter());
+      current.getCounter().add(countersCurrent);
+      countersTotal.add(countersCurrent);
+      countersCurrent = new DatumStatusCounter();
+      persistQueue.clear();
     }
+    return current;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistReaderTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistReaderTask.java b/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistReaderTask.java
index f2f5567..f0e9626 100644
--- a/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistReaderTask.java
+++ b/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistReaderTask.java
@@ -15,12 +15,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.s3;
 
-import com.google.common.base.Strings;
 import org.apache.streams.core.DatumStatus;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.util.ComponentUtils;
+
+import com.google.common.base.Strings;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -28,57 +31,61 @@ import java.io.BufferedReader;
 import java.io.Closeable;
 import java.io.InputStreamReader;
 
+/**
+ * S3PersistReaderTask reads documents from S3 on behalf of the reader.
+ * @see org.apache.streams.s3.S3PersistReader
+ */
 public class S3PersistReaderTask implements Runnable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(S3PersistReaderTask.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(S3PersistReaderTask.class);
 
-    private S3PersistReader reader;
+  private S3PersistReader reader;
 
-    public S3PersistReaderTask(S3PersistReader reader) {
-        this.reader = reader;
-    }
+  public S3PersistReaderTask(S3PersistReader reader) {
+    this.reader = reader;
+  }
 
-    @Override
-    public void run() {
-
-        for(String file : reader.getFiles()) {
-
-            // Create our buffered reader
-            S3ObjectInputStreamWrapper is = new S3ObjectInputStreamWrapper(reader.getAmazonS3Client().getObject(reader.getBucketName(), file));
-            BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(is));
-            LOGGER.info("Reading: {} ", file);
-
-            String line = "";
-            try {
-                while((line = bufferedReader.readLine()) != null) {
-                    if( !Strings.isNullOrEmpty(line) ) {
-                        reader.countersCurrent.incrementAttempt();
-                        StreamsDatum entry = reader.lineReaderUtil.processLine(line);
-                        ComponentUtils.offerUntilSuccess(entry, reader.persistQueue);
-                        reader.countersCurrent.incrementStatus(DatumStatus.SUCCESS);
-                    }
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-                LOGGER.warn(e.getMessage());
-                reader.countersCurrent.incrementStatus(DatumStatus.FAIL);
-            }
-
-            LOGGER.info("Completed:  " + file);
-
-            try {
-                closeSafely(file, is);
-            } catch (Exception e) {
-                LOGGER.error(e.getMessage());
-            }
+  @Override
+  public void run() {
+
+    for (String file : reader.getFiles()) {
+
+      // Create our buffered reader
+      S3ObjectInputStreamWrapper is = new S3ObjectInputStreamWrapper(reader.getAmazonS3Client().getObject(reader.getBucketName(), file));
+      BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(is));
+      LOGGER.info("Reading: {} ", file);
+
+      String line = "";
+      try {
+        while ((line = bufferedReader.readLine()) != null) {
+          if ( !Strings.isNullOrEmpty(line) ) {
+            reader.countersCurrent.incrementAttempt();
+            StreamsDatum entry = reader.lineReaderUtil.processLine(line);
+            ComponentUtils.offerUntilSuccess(entry, reader.persistQueue);
+            reader.countersCurrent.incrementStatus(DatumStatus.SUCCESS);
+          }
         }
+      } catch (Exception ex) {
+        ex.printStackTrace();
+        LOGGER.warn(ex.getMessage());
+        reader.countersCurrent.incrementStatus(DatumStatus.FAIL);
+      }
+
+      LOGGER.info("Completed:  " + file);
+
+      try {
+        closeSafely(file, is);
+      } catch (Exception ex) {
+        LOGGER.error(ex.getMessage());
+      }
     }
+  }
 
-    private static void closeSafely(String file, Closeable closeable) {
-        try {
-            closeable.close();
-        } catch(Exception e) {
-            LOGGER.error("There was an issue closing file: {}", file);
-        }
+  private static void closeSafely(String file, Closeable closeable) {
+    try {
+      closeable.close();
+    } catch (Exception ex) {
+      LOGGER.error("There was an issue closing file: {}", file);
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistWriter.java b/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistWriter.java
index 3686f55..ef6e831 100644
--- a/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistWriter.java
+++ b/streams-contrib/streams-amazon-aws/streams-persist-s3/src/main/java/org/apache/streams/s3/S3PersistWriter.java
@@ -15,8 +15,19 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.s3;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.converter.LineReadWriteUtil;
+import org.apache.streams.core.DatumStatus;
+import org.apache.streams.core.DatumStatusCountable;
+import org.apache.streams.core.DatumStatusCounter;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsPersistWriter;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.amazonaws.ClientConfiguration;
 import com.amazonaws.Protocol;
 import com.amazonaws.auth.AWSCredentials;
@@ -28,15 +39,7 @@ import com.amazonaws.services.s3.S3ClientOptions;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.converter.LineReadWriteUtil;
-import org.apache.streams.core.DatumStatus;
-import org.apache.streams.core.DatumStatusCountable;
-import org.apache.streams.core.DatumStatusCounter;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsPersistWriter;
-import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -53,239 +56,256 @@ import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
-public class S3PersistWriter implements StreamsPersistWriter, DatumStatusCountable
-{
-    public final static String STREAMS_ID = "S3PersistWriter";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(S3PersistWriter.class);
+/**
+ * S3PersistWriter writes documents to s3.
+ */
+public class S3PersistWriter implements StreamsPersistWriter, DatumStatusCountable {
 
-    private final static char DELIMITER = '\t';
+  public static final String STREAMS_ID = "S3PersistWriter";
 
-    private ObjectMapper objectMapper;
-    private AmazonS3Client amazonS3Client;
-    private S3WriterConfiguration s3WriterConfiguration;
-    private final List<String> writtenFiles = new ArrayList<String>();
-    protected LineReadWriteUtil lineWriterUtil;
+  private static final Logger LOGGER = LoggerFactory.getLogger(S3PersistWriter.class);
 
-    private final AtomicLong totalBytesWritten = new AtomicLong();
-    private AtomicLong bytesWrittenThisFile = new AtomicLong();
+  private static final char DELIMITER = '\t';
 
-    private final AtomicInteger totalRecordsWritten = new AtomicInteger();
-    private AtomicInteger fileLineCounter = new AtomicInteger();
+  private ObjectMapper objectMapper;
+  private AmazonS3Client amazonS3Client;
+  private S3WriterConfiguration s3WriterConfiguration;
+  private final List<String> writtenFiles = new ArrayList<String>();
+  protected LineReadWriteUtil lineWriterUtil;
 
-    private Map<String, String> objectMetaData = new HashMap<String, String>() {{
-        put("line[0]", "id");
-        put("line[1]", "timeStamp");
-        put("line[2]", "metaData");
-        put("line[3]", "document");
-    }};
+  private final AtomicLong totalBytesWritten = new AtomicLong();
+  private AtomicLong bytesWrittenThisFile = new AtomicLong();
 
-    private OutputStreamWriter currentWriter = null;
+  private final AtomicInteger totalRecordsWritten = new AtomicInteger();
+  private AtomicInteger fileLineCounter = new AtomicInteger();
 
-    public AmazonS3Client getAmazonS3Client() {
-        return this.amazonS3Client;
-    }
+  private static Map<String, String> objectMetaData = new HashMap<String, String>();
 
-    public S3WriterConfiguration getS3WriterConfiguration() {
-        return this.s3WriterConfiguration;
-    }
+  static {
+    objectMetaData.put("line[0]", "id");
+    objectMetaData.put("line[1]", "timeStamp");
+    objectMetaData.put("line[2]", "metaData");
+    objectMetaData.put("line[3]", "document");
+  }
 
-    public List<String> getWrittenFiles() {
-        return this.writtenFiles;
-    }
+  private OutputStreamWriter currentWriter = null;
 
-    public Map<String, String> getObjectMetaData() {
-        return this.objectMetaData;
-    }
+  public AmazonS3Client getAmazonS3Client() {
+    return this.amazonS3Client;
+  }
 
-    public ObjectMapper getObjectMapper() {
-        return this.objectMapper;
-    }
+  public S3WriterConfiguration getS3WriterConfiguration() {
+    return this.s3WriterConfiguration;
+  }
 
-    public void setObjectMapper(ObjectMapper mapper) {
-        this.objectMapper = mapper;
-    }
+  public List<String> getWrittenFiles() {
+    return this.writtenFiles;
+  }
 
-    public void setObjectMetaData(Map<String, String> val) {
-        this.objectMetaData = val;
-    }
+  public Map<String, String> getObjectMetaData() {
+    return this.objectMetaData;
+  }
 
-    public S3PersistWriter() {
-        this(new ComponentConfigurator<>(S3WriterConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig().getConfig("s3")));
-    }
+  public ObjectMapper getObjectMapper() {
+    return this.objectMapper;
+  }
 
-    public S3PersistWriter(S3WriterConfiguration s3WriterConfiguration) {
-        this.s3WriterConfiguration = s3WriterConfiguration;
-    }
+  public void setObjectMapper(ObjectMapper mapper) {
+    this.objectMapper = mapper;
+  }
 
-    /**
-     * Instantiator with a pre-existing amazonS3Client, this is used to help with re-use.
-     * @param amazonS3Client
-     * If you have an existing amazonS3Client, it wont' bother to create another one
-     * @param s3WriterConfiguration
-     * Configuration of the write paths and instructions are still required.
-     */
-    public S3PersistWriter(AmazonS3Client amazonS3Client, S3WriterConfiguration s3WriterConfiguration) {
-        this.amazonS3Client = amazonS3Client;
-        this.s3WriterConfiguration = s3WriterConfiguration;
-    }
+  public void setObjectMetaData(Map<String, String> val) {
+    this.objectMetaData = val;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public S3PersistWriter() {
+    this(new ComponentConfigurator<>(S3WriterConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig().getConfig("s3")));
+  }
 
-    @Override
-    public void write(StreamsDatum streamsDatum) {
-
-        synchronized (this) {
-            // Check to see if we need to reset the file that we are currently working with
-            if (this.currentWriter == null || ( this.bytesWrittenThisFile.get()  >= (this.s3WriterConfiguration.getMaxFileSize() * 1024 * 1024))) {
-                try {
-                    LOGGER.info("Resetting the file");
-                    this.currentWriter = resetFile();
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-
-            String line = lineWriterUtil.convertResultToString(streamsDatum);
-
-            try {
-                this.currentWriter.write(line);
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-
-            // add the bytes we've written
-            int recordSize = line.getBytes().length;
-            this.totalBytesWritten.addAndGet(recordSize);
-            this.bytesWrittenThisFile.addAndGet(recordSize);
-
-            // increment the record count
-            this.totalRecordsWritten.incrementAndGet();
-            this.fileLineCounter.incrementAndGet();
-        }
+  public S3PersistWriter(S3WriterConfiguration s3WriterConfiguration) {
+    this.s3WriterConfiguration = s3WriterConfiguration;
+  }
 
-    }
+  /**
+   * Instantiator with a pre-existing amazonS3Client, this is used to help with re-use.
+   * @param amazonS3Client
+ * If you have an existing amazonS3Client, it won't bother to create another one
+   * @param s3WriterConfiguration
+   * Configuration of the write paths and instructions are still required.
+   */
+  public S3PersistWriter(AmazonS3Client amazonS3Client, S3WriterConfiguration s3WriterConfiguration) {
+    this.amazonS3Client = amazonS3Client;
+    this.s3WriterConfiguration = s3WriterConfiguration;
+  }
 
-    public synchronized OutputStreamWriter resetFile() throws Exception {
-        // this will keep it thread safe, so we don't create too many files
-        if(this.fileLineCounter.get() == 0 && this.currentWriter != null)
-            return this.currentWriter;
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        closeAndDestroyWriter();
+  @Override
+  public void write(StreamsDatum streamsDatum) {
 
-        // Create the path for where the file is going to live.
+    synchronized (this) {
+      // Check to see if we need to reset the file that we are currently working with
+      if (this.currentWriter == null || ( this.bytesWrittenThisFile.get()  >= (this.s3WriterConfiguration.getMaxFileSize() * 1024 * 1024))) {
         try {
-            // generate a file name
-            String fileName = this.s3WriterConfiguration.getWriterFilePrefix() +
-                    (this.s3WriterConfiguration.getChunk() ? "/" : "-") + new Date().getTime() + ".tsv";
-
-            // create the output stream
-            OutputStream outputStream = new S3OutputStreamWrapper(this.amazonS3Client,
-                    this.s3WriterConfiguration.getBucket(),
-                    this.s3WriterConfiguration.getWriterPath(),
-                    fileName,
-                    this.objectMetaData);
-
-            // reset the counter
-            this.fileLineCounter = new AtomicInteger();
-            this.bytesWrittenThisFile = new AtomicLong();
-
-            // add this to the list of written files
-            writtenFiles.add(this.s3WriterConfiguration.getWriterPath() + fileName);
-
-            // Log that we are creating this file
-            LOGGER.info("File Created: Bucket[{}] - {}", this.s3WriterConfiguration.getBucket(), this.s3WriterConfiguration.getWriterPath() + fileName);
-
-            // return the output stream
-            return new OutputStreamWriter(outputStream);
-        } catch (Exception e) {
-            LOGGER.error(e.getMessage());
-            throw e;
+          LOGGER.info("Resetting the file");
+          this.currentWriter = resetFile();
+        } catch (Exception ex) {
+          ex.printStackTrace();
         }
-    }
+      }
 
-    private synchronized void closeAndDestroyWriter() {
-        // if there is a current writer, we must close it first.
-        if (this.currentWriter != null) {
-            this.safeFlush(this.currentWriter);
-            this.closeSafely(this.currentWriter);
-            this.currentWriter = null;
+      String line = lineWriterUtil.convertResultToString(streamsDatum);
 
-            // Logging of information to alert the user to the activities of this class
-            LOGGER.debug("File Closed: Records[{}] Bytes[{}] {} ", this.fileLineCounter.get(), this.bytesWrittenThisFile.get(), this.writtenFiles.get(this.writtenFiles.size()-1));
-        }
+      try {
+        this.currentWriter.write(line);
+      } catch (IOException ex) {
+        ex.printStackTrace();
+      }
+
+      // add the bytes we've written
+      int recordSize = line.getBytes().length;
+      this.totalBytesWritten.addAndGet(recordSize);
+      this.bytesWrittenThisFile.addAndGet(recordSize);
+
+      // increment the record count
+      this.totalRecordsWritten.incrementAndGet();
+      this.fileLineCounter.incrementAndGet();
     }
 
-    private synchronized void closeSafely(Writer writer)  {
-        if(writer != null) {
-            try {
-                writer.flush();
-                writer.close();
-            } catch(Exception e) {
-                // noOp
-            }
-            LOGGER.debug("File Closed");
-        }
+  }
+
+  /**
+   * Reset File when it's time to create a new file.
+   * @return OutputStreamWriter
+   * @throws Exception Exception
+   */
+  public synchronized OutputStreamWriter resetFile() throws Exception {
+    // this will keep it thread safe, so we don't create too many files
+    if (this.fileLineCounter.get() == 0 && this.currentWriter != null) {
+      return this.currentWriter;
     }
 
-    private void safeFlush(Flushable flushable) {
-        // This is wrapped with a ByteArrayOutputStream, so this is really safe.
-        if(flushable != null) {
-            try {
-                flushable.flush();
-            } catch(IOException e) {
-                // noOp
-            }
-        }
+    closeAndDestroyWriter();
+
+    // Create the path for where the file is going to live.
+    try {
+      // generate a file name
+      String fileName = this.s3WriterConfiguration.getWriterFilePrefix()
+          + (this.s3WriterConfiguration.getChunk() ? "/" : "-")
+          + new Date().getTime()
+          + ".tsv";
+
+      // create the output stream
+      OutputStream outputStream = new S3OutputStreamWrapper(this.amazonS3Client,
+          this.s3WriterConfiguration.getBucket(),
+          this.s3WriterConfiguration.getWriterPath(),
+          fileName,
+          this.objectMetaData);
+
+      // reset the counter
+      this.fileLineCounter = new AtomicInteger();
+      this.bytesWrittenThisFile = new AtomicLong();
+
+      // add this to the list of written files
+      writtenFiles.add(this.s3WriterConfiguration.getWriterPath() + fileName);
+
+      // Log that we are creating this file
+      LOGGER.info("File Created: Bucket[{}] - {}", this.s3WriterConfiguration.getBucket(), this.s3WriterConfiguration.getWriterPath() + fileName);
+
+      // return the output stream
+      return new OutputStreamWriter(outputStream);
+    } catch (Exception ex) {
+      LOGGER.error(ex.getMessage());
+      throw ex;
     }
+  }
+
+  private synchronized void closeAndDestroyWriter() {
+    // if there is a current writer, we must close it first.
+    if (this.currentWriter != null) {
+      this.safeFlush(this.currentWriter);
+      this.closeSafely(this.currentWriter);
+      this.currentWriter = null;
 
-    public void prepare(Object configurationObject) {
+      // Logging of information to alert the user to the activities of this class
+      LOGGER.debug("File Closed: Records[{}] Bytes[{}] {} ", this.fileLineCounter.get(), this.bytesWrittenThisFile.get(), this.writtenFiles.get(this.writtenFiles.size() - 1));
+    }
+  }
+
+  private synchronized void closeSafely(Writer writer)  {
+    if (writer != null) {
+      try {
+        writer.flush();
+        writer.close();
+      } catch (Exception ex) {
+        LOGGER.trace("closeSafely", ex);
+      }
+      LOGGER.debug("File Closed");
+    }
+  }
+
+  private void safeFlush(Flushable flushable) {
+    // This is wrapped with a ByteArrayOutputStream, so this is really safe.
+    if (flushable != null) {
+      try {
+        flushable.flush();
+      } catch (IOException ex) {
+        LOGGER.trace("safeFlush", ex);
+      }
+    }
+  }
 
-        lineWriterUtil = LineReadWriteUtil.getInstance(s3WriterConfiguration);
+  @Override
+  public void prepare(Object configurationObject) {
 
-        // Connect to S3
-        synchronized (this) {
+    lineWriterUtil = LineReadWriteUtil.getInstance(s3WriterConfiguration);
 
-            try {
-                // if the user has chosen to not set the object mapper, then set a default object mapper for them.
-                if (this.objectMapper == null)
-                    this.objectMapper = StreamsJacksonMapper.getInstance();
+    // Connect to S3
+    synchronized (this) {
 
-                // Create the credentials Object
-                if (this.amazonS3Client == null) {
-                    AWSCredentials credentials = new BasicAWSCredentials(s3WriterConfiguration.getKey(), s3WriterConfiguration.getSecretKey());
+      try {
+        // if the user has chosen to not set the object mapper, then set a default object mapper for them.
+        if (this.objectMapper == null) {
+          this.objectMapper = StreamsJacksonMapper.getInstance();
+        }
 
-                    ClientConfiguration clientConfig = new ClientConfiguration();
-                    clientConfig.setProtocol(Protocol.valueOf(s3WriterConfiguration.getProtocol().toString()));
+        // Create the credentials Object
+        if (this.amazonS3Client == null) {
+          AWSCredentials credentials = new BasicAWSCredentials(s3WriterConfiguration.getKey(), s3WriterConfiguration.getSecretKey());
 
-                    // We do not want path style access
-                    S3ClientOptions clientOptions = new S3ClientOptions();
-                    clientOptions.setPathStyleAccess(false);
+          ClientConfiguration clientConfig = new ClientConfiguration();
+          clientConfig.setProtocol(Protocol.valueOf(s3WriterConfiguration.getProtocol().toString()));
 
-                    this.amazonS3Client = new AmazonS3Client(credentials, clientConfig);
-                    if (!Strings.isNullOrEmpty(s3WriterConfiguration.getRegion()))
-                        this.amazonS3Client.setRegion(Region.getRegion(Regions.fromName(s3WriterConfiguration.getRegion())));
-                    this.amazonS3Client.setS3ClientOptions(clientOptions);
-                }
-            } catch (Exception e) {
-                LOGGER.error("Exception while preparing the S3 client: {}", e);
-            }
+          // We do not want path style access
+          S3ClientOptions clientOptions = new S3ClientOptions();
+          clientOptions.setPathStyleAccess(false);
 
-            Preconditions.checkArgument(this.amazonS3Client != null);
+          this.amazonS3Client = new AmazonS3Client(credentials, clientConfig);
+          if (!Strings.isNullOrEmpty(s3WriterConfiguration.getRegion())) {
+            this.amazonS3Client.setRegion(Region.getRegion(Regions.fromName(s3WriterConfiguration.getRegion())));
+          }
+          this.amazonS3Client.setS3ClientOptions(clientOptions);
         }
-    }
-
-    public void cleanUp() {
-        closeAndDestroyWriter();
-    }
+      } catch (Exception ex) {
+        LOGGER.error("Exception while preparing the S3 client: {}", ex);
+      }
 
-    public DatumStatusCounter getDatumStatusCounter() {
-        DatumStatusCounter counters = new DatumStatusCounter();
-        counters.incrementAttempt(this.totalRecordsWritten.get());
-        counters.incrementStatus(DatumStatus.SUCCESS, this.totalRecordsWritten.get());
-        return counters;
+      Preconditions.checkArgument(this.amazonS3Client != null);
     }
+  }
+
+  public void cleanUp() {
+    closeAndDestroyWriter();
+  }
+
+  @Override
+  public DatumStatusCounter getDatumStatusCounter() {
+    DatumStatusCounter counters = new DatumStatusCounter();
+    counters.incrementAttempt(this.totalRecordsWritten.get());
+    counters.incrementStatus(DatumStatus.SUCCESS, this.totalRecordsWritten.get());
+    return counters;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistReader.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistReader.java b/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistReader.java
index 8793333..43d9e34 100644
--- a/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistReader.java
+++ b/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistReader.java
@@ -18,102 +18,100 @@
 
 package org.apache.streams.console;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistReader;
-import org.apache.streams.core.StreamsPersistWriter;
 import org.apache.streams.core.StreamsResultSet;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.BufferedInputStream;
 import java.io.InputStream;
-import java.io.PrintStream;
 import java.math.BigInteger;
 import java.util.Queue;
 import java.util.Scanner;
 import java.util.concurrent.ConcurrentLinkedQueue;
 
+/**
+ * ConsolePersistReader reads documents from stdin.
+ */
 public class ConsolePersistReader implements StreamsPersistReader {
 
-    private final static String STREAMS_ID = "ConsolePersistReader";
+  private static final String STREAMS_ID = "ConsolePersistReader";
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(ConsolePersistReader.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(ConsolePersistReader.class);
 
-    protected volatile Queue<StreamsDatum> persistQueue;
+  protected volatile Queue<StreamsDatum> persistQueue;
 
-    protected InputStream inputStream = System.in;
+  protected InputStream inputStream = System.in;
 
-    public ConsolePersistReader() {
-        this.persistQueue = new ConcurrentLinkedQueue<StreamsDatum>();
-    }
+  public ConsolePersistReader() {
+    this.persistQueue = new ConcurrentLinkedQueue<StreamsDatum>();
+  }
 
-    public ConsolePersistReader(InputStream inputStream) {
-        this();
-        this.inputStream = inputStream;
-    }
+  public ConsolePersistReader(InputStream inputStream) {
+    this();
+    this.inputStream = inputStream;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    public void prepare(Object o) {
+  public void prepare(Object configuration) {
 
-    }
+  }
 
-    public void cleanUp() {
+  public void cleanUp() {
 
-    }
+  }
 
-    @Override
-    public void startStream() {
-        // no op
-    }
+  @Override
+  public void startStream() {
+    // no op
+  }
 
-    @Override
-    public StreamsResultSet readAll() {
-        return readCurrent();
-    }
+  @Override
+  public StreamsResultSet readAll() {
+    return readCurrent();
+  }
 
-    @Override
-    public StreamsResultSet readCurrent() {
+  @Override
+  public StreamsResultSet readCurrent() {
 
-        LOGGER.info("{} readCurrent", STREAMS_ID);
+    LOGGER.info("{} readCurrent", STREAMS_ID);
 
-        Scanner sc = new Scanner(inputStream);
+    Scanner sc = new Scanner(inputStream);
 
-        while( sc.hasNextLine() ) {
+    while ( sc.hasNextLine() ) {
 
-            persistQueue.offer(new StreamsDatum(sc.nextLine()));
+      persistQueue.offer(new StreamsDatum(sc.nextLine()));
 
-        }
+    }
 
-        LOGGER.info("Providing {} docs", persistQueue.size());
+    LOGGER.info("Providing {} docs", persistQueue.size());
 
-        StreamsResultSet result =  new StreamsResultSet(persistQueue);
+    StreamsResultSet result =  new StreamsResultSet(persistQueue);
 
-        LOGGER.info("{} Exiting", STREAMS_ID);
+    LOGGER.info("{} Exiting", STREAMS_ID);
 
-        return result;
+    return result;
 
-    }
+  }
 
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return readCurrent();
-    }
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return readCurrent();
+  }
 
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return readCurrent();
-    }
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return readCurrent();
+  }
 
-    @Override
-    public boolean isRunning() {
-        return true;  //Will always be running
-    }
+  @Override
+  public boolean isRunning() {
+    return true;  //Will always be running
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistWriter.java b/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistWriter.java
index 6d284ba..6358071 100644
--- a/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistWriter.java
+++ b/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistWriter.java
@@ -18,12 +18,14 @@
 
 package org.apache.streams.console;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistWriter;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -31,53 +33,56 @@ import java.io.PrintStream;
 import java.util.Queue;
 import java.util.concurrent.ConcurrentLinkedQueue;
 
+/**
+ * ConsolePersistWriter writes documents to stdout.
+ */
 public class ConsolePersistWriter implements StreamsPersistWriter {
 
-    private final static String STREAMS_ID = "ConsolePersistWriter";
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(ConsolePersistWriter.class);
+  private static final String STREAMS_ID = "ConsolePersistWriter";
 
-    protected PrintStream printStream = System.out;
+  private static final Logger LOGGER = LoggerFactory.getLogger(ConsolePersistWriter.class);
 
-    protected volatile Queue<StreamsDatum> persistQueue;
+  protected PrintStream printStream = System.out;
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  protected volatile Queue<StreamsDatum> persistQueue;
 
-    public ConsolePersistWriter() {
-        this.persistQueue = new ConcurrentLinkedQueue<StreamsDatum>();
-    }
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    public ConsolePersistWriter(PrintStream printStream) {
-        this();
-        this.printStream = printStream;
-    }
+  public ConsolePersistWriter() {
+    this.persistQueue = new ConcurrentLinkedQueue<StreamsDatum>();
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public ConsolePersistWriter(PrintStream printStream) {
+    this();
+    this.printStream = printStream;
+  }
 
-    public void prepare(Object o) {
-        Preconditions.checkNotNull(persistQueue);
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    public void cleanUp() {
+  public void prepare(Object configuration) {
+    Preconditions.checkNotNull(persistQueue);
+  }
 
-    }
+  public void cleanUp() {
 
-    @Override
-    public void write(StreamsDatum entry) {
+  }
 
-        try {
+  @Override
+  public void write(StreamsDatum entry) {
 
-            String text = mapper.writeValueAsString(entry);
+    try {
 
-            printStream.println(text);
+      String text = mapper.writeValueAsString(entry);
 
-        } catch (JsonProcessingException e) {
-            LOGGER.warn("save: {}", e);
-        }
+      printStream.println(text);
 
+    } catch (JsonProcessingException ex) {
+      LOGGER.warn("save: {}", ex);
     }
 
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistWriterTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistWriterTask.java b/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistWriterTask.java
index e5009f0..ecb60e3 100644
--- a/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistWriterTask.java
+++ b/streams-contrib/streams-persist-console/src/main/java/org/apache/streams/console/ConsolePersistWriterTask.java
@@ -19,36 +19,43 @@
 package org.apache.streams.console;
 
 import org.apache.streams.core.StreamsDatum;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Random;
 
+/**
+ * ConsolePersistWriterTask writes documents to stdout on behalf of
+ * @see org.apache.streams.console.ConsolePersistWriter
+ */
 public class ConsolePersistWriterTask implements Runnable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(ConsolePersistWriterTask.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(ConsolePersistWriterTask.class);
 
-    private ConsolePersistWriter writer;
+  private ConsolePersistWriter writer;
 
-    public ConsolePersistWriterTask(ConsolePersistWriter writer) {
-        this.writer = writer;
-    }
+  public ConsolePersistWriterTask(ConsolePersistWriter writer) {
+    this.writer = writer;
+  }
 
-    @Override
-    public void run() {
-        while(true) {
-            if( writer.persistQueue.peek() != null ) {
-                try {
-                    StreamsDatum entry = writer.persistQueue.remove();
-                    writer.write(entry);
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-            try {
-                Thread.sleep(new Random().nextInt(100));
-            } catch (InterruptedException e) {}
+  @Override
+  public void run() {
+    while (true) {
+      if ( writer.persistQueue.peek() != null ) {
+        try {
+          StreamsDatum entry = writer.persistQueue.remove();
+          writer.write(entry);
+        } catch (Exception ex) {
+          ex.printStackTrace();
         }
+      }
+      try {
+        Thread.sleep(new Random().nextInt(100));
+      } catch (InterruptedException interrupt) {
+        LOGGER.trace("Interrupted", interrupt);
+      }
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchClient.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchClient.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchClient.java
index 0b2b782..8c7f724 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchClient.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchClient.java
@@ -21,21 +21,24 @@ package org.apache.streams.elasticsearch;
 import org.elasticsearch.Version;
 import org.elasticsearch.client.Client;
 
+/**
+ * Wrapper class for a client with a known version.
+ */
 public class ElasticsearchClient {
 
-    private Client client;
-    private Version version;
+  private Client client;
+  private Version version;
 
-    public ElasticsearchClient(Client client, Version version) {
-        this.client = client;
-        this.version = version;
-    }
+  public ElasticsearchClient(Client client, Version version) {
+    this.client = client;
+    this.version = version;
+  }
 
-    public Client getClient() {
-        return client;
-    }
+  public Client getClient() {
+    return client;
+  }
 
-    public Version getVersion() {
-        return version;
-    }
+  public Version getVersion() {
+    return version;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchClientManager.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchClientManager.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchClientManager.java
index 4809334..bdff9aa 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchClientManager.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchClientManager.java
@@ -19,6 +19,7 @@
 package org.apache.streams.elasticsearch;
 
 import com.google.common.net.InetAddresses;
+
 import org.apache.commons.lang.builder.EqualsBuilder;
 import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.commons.lang.builder.ToStringBuilder;
@@ -41,157 +42,154 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.ExecutionException;
 
+/**
+ * Wrapper class managing multiple instances of
+ * @see org.apache.streams.elasticsearch.ElasticsearchClient
+ */
 public class ElasticsearchClientManager {
-    private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchClientManager.class);
-    private static Map<String, ElasticsearchClient> ALL_CLIENTS = new HashMap<>();
-
-    private ElasticsearchConfiguration elasticsearchConfiguration;
-
-    public ElasticsearchClientManager(ElasticsearchConfiguration elasticsearchConfiguration) {
-        this.elasticsearchConfiguration = elasticsearchConfiguration;
-    }
 
-    public ElasticsearchConfiguration getElasticsearchConfiguration() {
-        return elasticsearchConfiguration;
+  private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchClientManager.class);
+  private static Map<String, ElasticsearchClient> ALL_CLIENTS = new HashMap<>();
+
+  private ElasticsearchConfiguration elasticsearchConfiguration;
+
+  public ElasticsearchClientManager(ElasticsearchConfiguration elasticsearchConfiguration) {
+    this.elasticsearchConfiguration = elasticsearchConfiguration;
+  }
+
+  public ElasticsearchConfiguration getElasticsearchConfiguration() {
+    return elasticsearchConfiguration;
+  }
+
+  /**
+   * Get the Client for this manager. It is actually a transport client, but it is much
+   * easier to work with the generic object as this interface likely won't change from
+   * elasticsearch. This method is synchronized to block threads from creating
+   * too many of these at any given time.
+   *
+   * @return Client for elasticsearch
+   */
+  public Client getClient() {
+    checkAndLoadClient(null);
+
+    return ALL_CLIENTS.get(this.elasticsearchConfiguration.getClusterName()).getClient();
+  }
+
+  /**
+   * Returns Client with clusterName.
+   * @param clusterName clusterName
+   */
+  public Client getClient(String clusterName) {
+    checkAndLoadClient(clusterName);
+
+    return ALL_CLIENTS.get(this.elasticsearchConfiguration.getClusterName()).getClient();
+  }
+
+  public boolean isOnOrAfterVersion(Version version) {
+    return ALL_CLIENTS.get(this.elasticsearchConfiguration.getClusterName()).getVersion().onOrAfter(version);
+  }
+
+  public boolean refresh(String index) {
+    return refresh(new String[]{index});
+  }
+
+  public boolean refresh(String[] indexes) {
+    RefreshResponse refreshResponse = this.getClient().admin().indices().prepareRefresh(indexes).execute().actionGet();
+    return refreshResponse.getFailedShards() == 0;
+  }
+
+  /**
+   * Terminate the elasticsearch clients.
+   */
+  public synchronized void stop() {
+    // Check to see if we have a client.
+    if (ALL_CLIENTS.containsKey(this.elasticsearchConfiguration.getClusterName())) {
+      // Close the client
+      ALL_CLIENTS.get(this.elasticsearchConfiguration.getClusterName()).getClient().close();
+
+      // Remove it so that it isn't in memory any more.
+      ALL_CLIENTS.remove(this.elasticsearchConfiguration.getClusterName());
     }
+  }
 
-    /**
-     * ***********************************************************************************
-     * Get the Client for this return, it is actually a transport client, but it is much
-     * easier to work with the generic object as this interface likely won't change from
-     * elasticsearch. This method is synchronized to block threads from creating
-     * too many of these at any given time.
-     *
-     * @return Client for elasticsearch
-     * ***********************************************************************************
-     */
-    public Client getClient() {
-        checkAndLoadClient(null);
-
-        return ALL_CLIENTS.get(this.elasticsearchConfiguration.getClusterName()).getClient();
-    }
+  public ClusterHealthResponse getStatus() throws ExecutionException, InterruptedException {
+    ClusterHealthRequestBuilder request = this.getClient().admin().cluster().prepareHealth();
+    return request.execute().get();
+  }
 
-    public Client getClient(String clusterName) {
-        checkAndLoadClient(clusterName);
+  public String toString() {
+    return ToStringBuilder.reflectionToString(this);
+  }
 
-        return ALL_CLIENTS.get(this.elasticsearchConfiguration.getClusterName()).getClient();
-    }
+  public boolean equals(Object configuration) {
+    return EqualsBuilder.reflectionEquals(this, configuration, Collections.singletonList(this.elasticsearchConfiguration.toString()));
+  }
 
-    public boolean isOnOrAfterVersion(Version version) {
-        return ALL_CLIENTS.get(this.elasticsearchConfiguration.getClusterName()).getVersion().onOrAfter(version);
-    }
+  public int hashCode() {
+    return HashCodeBuilder.reflectionHashCode(this, Collections.singletonList(this.elasticsearchConfiguration.toString()));
+  }
 
-    public void start() throws Exception {
-        /*
-         * Note:
-         * Everything in these classes is being switched to lazy loading. Within
-         * Heroku you only have 60 seconds to connect, and bind to the service,
-         * and you are only allowed to run in 1Gb of memory. Switching all
-         * of this to lazy loading is how we are fixing some of the issues
-         * if you are having issues with these classes, please, refactor
-         * and create a UNIT TEST CASE!!!!!! To ensure that everything is
-         * working before you check it back in.
-         *
-         * Author: Smashew @ 2013-08-26
-         **********************************************************************/
-    }
+  private synchronized void checkAndLoadClient(String clusterName) {
 
-    public boolean refresh(String index) {
-        return refresh(new String[]{index});
+    if (clusterName == null) {
+      clusterName = this.elasticsearchConfiguration.getClusterName();
     }
 
-    public boolean refresh(String[] indexes) {
-        RefreshResponse refreshResponse = this.getClient().admin().indices().prepareRefresh(indexes).execute().actionGet();
-        return refreshResponse.getFailedShards() == 0;
+    // If it is there, exit early
+    if (ALL_CLIENTS.containsKey(clusterName)) {
+      return;
     }
 
-    public synchronized void stop() {
-        // Terminate the elasticsearch cluster
-        // Check to see if we have a client.
-        if (ALL_CLIENTS.containsKey(this.elasticsearchConfiguration.getClusterName())) {
-            // Close the client
-            ALL_CLIENTS.get(this.elasticsearchConfiguration.getClusterName()).getClient().close();
-
-            // Remove it so that it isn't in memory any more.
-            ALL_CLIENTS.remove(this.elasticsearchConfiguration.getClusterName());
+    try {
+      // We are currently using lazy loading to start the elasticsearch cluster.
+      LOGGER.info("Creating a new TransportClient: {}", this.elasticsearchConfiguration.getHosts());
+
+      Settings settings = Settings.settingsBuilder()
+          .put("cluster.name", this.elasticsearchConfiguration.getClusterName())
+          .put("client.transport.ping_timeout", "90s")
+          .put("client.transport.nodes_sampler_interval", "60s")
+          .build();
+
+
+      // Create the client
+      TransportClient transportClient = TransportClient.builder().settings(settings).build();
+      for (String h : elasticsearchConfiguration.getHosts()) {
+        LOGGER.info("Adding Host: {}", h);
+        InetAddress address;
+
+        if ( InetAddresses.isInetAddress(h)) {
+          LOGGER.info("{} is an IP address", h);
+          address = InetAddresses.forString(h);
+        } else {
+          LOGGER.info("{} is a hostname", h);
+          address = InetAddress.getByName(h);
         }
-    }
+        transportClient.addTransportAddress(
+            new InetSocketTransportAddress(
+                address,
+                elasticsearchConfiguration.getPort().intValue()));
+      }
 
-    public ClusterHealthResponse getStatus() throws ExecutionException, InterruptedException {
-        ClusterHealthRequestBuilder request = this.getClient().admin().cluster().prepareHealth();
-        return request.execute().get();
-    }
+      // Add the client and figure out the version.
+      ElasticsearchClient elasticsearchClient = new ElasticsearchClient(transportClient, getVersion(transportClient));
 
-    public String toString() {
-        return ToStringBuilder.reflectionToString(this);
-    }
+      // Add it to our static map
+      ALL_CLIENTS.put(clusterName, elasticsearchClient);
 
-    public boolean equals(Object o) {
-        return EqualsBuilder.reflectionEquals(this, o, Collections.singletonList(this.elasticsearchConfiguration.toString()));
+    } catch (Exception ex) {
+      LOGGER.error("Could not Create elasticsearch Transport Client: {}", ex);
     }
 
-    public int hashCode() {
-        return HashCodeBuilder.reflectionHashCode(this, Collections.singletonList(this.elasticsearchConfiguration.toString()));
-    }
+  }
 
-    private synchronized void checkAndLoadClient(String clusterName) {
-
-        if (clusterName == null)
-            clusterName = this.elasticsearchConfiguration.getClusterName();
-
-        // If it is there, exit early
-        if (ALL_CLIENTS.containsKey(clusterName))
-            return;
-
-        try {
-            // We are currently using lazy loading to start the elasticsearch cluster, however.
-            LOGGER.info("Creating a new TransportClient: {}", this.elasticsearchConfiguration.getHosts());
-
-            Settings settings = Settings.settingsBuilder()
-                    .put("cluster.name", this.elasticsearchConfiguration.getClusterName())
-                    .put("client.transport.ping_timeout", "90s")
-                    .put("client.transport.nodes_sampler_interval", "60s")
-                    .build();
-
-
-            // Create the client
-            TransportClient transportClient = TransportClient.builder().settings(settings).build();
-            for (String h : elasticsearchConfiguration.getHosts()) {
-                LOGGER.info("Adding Host: {}", h);
-                InetAddress address;
-
-                if( InetAddresses.isInetAddress(h)) {
-                    LOGGER.info("{} is an IP address", h);
-                    address = InetAddresses.forString(h);
-                } else {
-                    LOGGER.info("{} is a hostname", h);
-                    address = InetAddress.getByName(h);
-                }
-                transportClient.addTransportAddress(
-                        new InetSocketTransportAddress(
-                                address,
-                                elasticsearchConfiguration.getPort().intValue()));
-            }
-            // Add the client and figure out the version.
-            ElasticsearchClient elasticsearchClient = new ElasticsearchClient(transportClient, getVersion(transportClient));
-
-            // Add it to our static map
-            ALL_CLIENTS.put(clusterName, elasticsearchClient);
-
-        } catch (Exception e) {
-            LOGGER.error("Could not Create elasticsearch Transport Client: {}", e);
-        }
+  private Version getVersion(Client client) {
+    try {
+      ClusterStateRequestBuilder clusterStateRequestBuilder = client.admin().cluster().prepareState();
+      ClusterStateResponse clusterStateResponse = clusterStateRequestBuilder.execute().actionGet();
 
+      return clusterStateResponse.getState().getNodes().getMasterNode().getVersion();
+    } catch (Exception ex) {
+      return null;
     }
-
-    private Version getVersion(Client client) {
-        try {
-            ClusterStateRequestBuilder clusterStateRequestBuilder = client.admin().cluster().prepareState();
-            ClusterStateResponse clusterStateResponse = clusterStateRequestBuilder.execute().actionGet();
-
-            return clusterStateResponse.getState().getNodes().getMasterNode().getVersion();
-        } catch (Exception e) {
-            return null;
-        }
-    }
+  }
 }



[02/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/schema/Schema.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/schema/Schema.java b/streams-util/src/main/java/org/apache/streams/util/schema/Schema.java
index 795bf98..a9517c1 100644
--- a/streams-util/src/main/java/org/apache/streams/util/schema/Schema.java
+++ b/streams-util/src/main/java/org/apache/streams/util/schema/Schema.java
@@ -15,6 +15,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.util.schema;
 
 import com.fasterxml.jackson.databind.JsonNode;
@@ -26,49 +27,69 @@ import java.net.URI;
  */
 public class Schema {
 
-    private final URI id;
-    private final URI uri;
-    private final JsonNode content;
-    private final Schema parent;
-    private final boolean generate;
+  private final URI id;
+  private final URI uri;
+  private final JsonNode content;
+  private final Schema parent;
+  private final boolean generate;
 
-    public Schema(URI uri, JsonNode content, Schema parent, boolean generate) {
-        this.uri = uri;
-        this.content = content;
-        this.parent = parent;
-        this.generate = generate;
-        this.id = content.has("id") ? URI.create(content.get("id").asText()) : null;
-    }
+  /**
+   * Schema constructor.
+   * @param uri uri
+   * @param content JsonNode content
+   * @param parent Schema parent
+   * @param generate whether to generate
+   */
+  public Schema(URI uri, JsonNode content, Schema parent, boolean generate) {
+    this.uri = uri;
+    this.content = content;
+    this.parent = parent;
+    this.generate = generate;
+    this.id = content.has("id") ? URI.create(content.get("id").asText()) : null;
+  }
 
-    public URI getId() {
-        return id;
-    }
+  public URI getId() {
+    return id;
+  }
 
-    public URI getURI() {
-        return uri;
-    }
+  public URI getUri() {
+    return uri;
+  }
 
-    public JsonNode getContent() {
-        return content;
-    }
+  public JsonNode getContent() {
+    return content;
+  }
 
-    public JsonNode getParentContent() {
-        if( parent != null )
-            return parent.getContent();
-        else return null;
+  /**
+   * getParentContent.
+   * @return Parent.Content
+   */
+  public JsonNode getParentContent() {
+    if ( parent != null ) {
+      return parent.getContent();
+    } else {
+      return null;
     }
+  }
 
-    public URI getParentURI() {
-        if( parent != null ) return parent.getURI();
-        else return null;
+  /**
+   * getParentUri.
+   * @return Parent.Uri
+   */
+  public URI getParentUri() {
+    if ( parent != null ) {
+      return parent.getUri();
+    } else {
+      return null;
     }
+  }
 
-    public boolean isGenerated() {
-        return generate;
-    }
+  public boolean isGenerated() {
+    return generate;
+  }
 
-    public Schema getParent() {
-        return parent;
-    }
+  public Schema getParent() {
+    return parent;
+  }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/schema/SchemaStore.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/schema/SchemaStore.java b/streams-util/src/main/java/org/apache/streams/util/schema/SchemaStore.java
index 779df41..4fca239 100644
--- a/streams-util/src/main/java/org/apache/streams/util/schema/SchemaStore.java
+++ b/streams-util/src/main/java/org/apache/streams/util/schema/SchemaStore.java
@@ -15,6 +15,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.util.schema;
 
 import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -27,33 +28,34 @@ import java.util.Iterator;
 /**
  * A SchemaStore resolves and indexes json schemas and makes their properties available.
  *
+ * <p/>
  * Implementations include
  * - SchemaStoreImpl
  */
 public interface SchemaStore extends Comparator<Schema> {
-    
-    Schema create(URI uri);
 
-    Schema create(Schema parent, String path);
+  Schema create(URI uri);
+
+  Schema create(Schema parent, String path);
 
-    void clearCache();
+  void clearCache();
 
-    Integer getSize();
+  Integer getSize();
 
-    Optional<Schema> getById(URI id);
+  Optional<Schema> getById(URI id);
 
-    Optional<Schema> getByUri(URI uri);
+  Optional<Schema> getByUri(URI uri);
 
-    Integer getFileUriCount();
+  Integer getFileUriCount();
 
-    Integer getHttpUriCount();
+  Integer getHttpUriCount();
 
-    Iterator<Schema> getSchemaIterator();
+  Iterator<Schema> getSchemaIterator();
 
-    ObjectNode resolveProperties(Schema schema, ObjectNode fieldNode, String resourceId);
+  ObjectNode resolveProperties(Schema schema, ObjectNode fieldNode, String resourceId);
 
-    ObjectNode resolveItems(Schema schema, ObjectNode fieldNode, String resourceId);
+  ObjectNode resolveItems(Schema schema, ObjectNode fieldNode, String resourceId);
 
-    @Override
-    int compare(Schema left, Schema right);
+  @Override
+  int compare(Schema left, Schema right);
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/schema/SchemaStoreImpl.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/schema/SchemaStoreImpl.java b/streams-util/src/main/java/org/apache/streams/util/schema/SchemaStoreImpl.java
index 7126c82..e99380d 100644
--- a/streams-util/src/main/java/org/apache/streams/util/schema/SchemaStoreImpl.java
+++ b/streams-util/src/main/java/org/apache/streams/util/schema/SchemaStoreImpl.java
@@ -15,6 +15,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.util.schema;
 
 import com.fasterxml.jackson.databind.JsonNode;
@@ -36,329 +37,364 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import static org.apache.streams.util.schema.URIUtil.safeResolve;
+import static org.apache.streams.util.schema.UriUtil.safeResolve;
 
 /**
- * Created by steve on 4/30/16.
+ * Default Implementation of SchemaStore.
  */
 public class SchemaStoreImpl extends Ordering<Schema> implements SchemaStore {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(SchemaStore.class);
-    private final static JsonNodeFactory NODE_FACTORY = JsonNodeFactory.instance;
+  private static final Logger LOGGER = LoggerFactory.getLogger(SchemaStore.class);
+  private static final JsonNodeFactory NODE_FACTORY = JsonNodeFactory.instance;
 
-    protected Map<URI, Schema> schemas = new HashMap();
-    protected FragmentResolver fragmentResolver = new FragmentResolver();
-    protected ContentResolver contentResolver = new ContentResolver();
+  protected Map<URI, Schema> schemas = new HashMap();
+  protected FragmentResolver fragmentResolver = new FragmentResolver();
+  protected ContentResolver contentResolver = new ContentResolver();
 
-    public SchemaStoreImpl() {
-    }
+  public SchemaStoreImpl() {
+  }
 
-    @Override
-    public synchronized Schema create(URI uri) {
-        if(!getByUri(uri).isPresent()) {
-            URI baseURI = URIUtil.removeFragment(uri);
-            JsonNode baseNode = this.contentResolver.resolve(baseURI);
-            if(uri.toString().contains("#") && !uri.toString().endsWith("#")) {
-                Schema newSchema = new Schema(baseURI, baseNode, null, true);
-                this.schemas.put(baseURI, newSchema);
-                JsonNode childContent = this.fragmentResolver.resolve(baseNode, '#' + StringUtils.substringAfter(uri.toString(), "#"));
-                this.schemas.put(uri, new Schema(uri, childContent, newSchema, false));
-            } else {
-                if( baseNode.has("extends") && baseNode.get("extends").isObject()) {
-                    URI ref = URI.create(((ObjectNode)baseNode.get("extends")).get("$ref").asText());
-                    URI absoluteURI;
-                    if( ref.isAbsolute())
-                        absoluteURI = ref;
-                    else
-                        absoluteURI = baseURI.resolve(ref);
-                    JsonNode parentNode = this.contentResolver.resolve(absoluteURI);
-                    Schema parentSchema = null;
-                    if( this.schemas.get(absoluteURI) != null ) {
-                        parentSchema = this.schemas.get(absoluteURI);
-                    } else {
-                        parentSchema = create(absoluteURI);
-                    }
-                    this.schemas.put(uri, new Schema(uri, baseNode, parentSchema, true));
-                } else {
-                    this.schemas.put(uri, new Schema(uri, baseNode, null, true));
-                }
-            }
-            List<JsonNode> refs = baseNode.findValues("$ref");
-            for( JsonNode ref : refs ) {
-                if( ref.isValueNode() ) {
-                    String refVal = ref.asText();
-                    URI refURI = null;
-                    try {
-                        refURI = URI.create(refVal);
-                    } catch( Exception e ) {
-                        LOGGER.info("Exception: {}", e.getMessage());
-                    }
-                    if (refURI != null && !getByUri(refURI).isPresent()) {
-                        if (refURI.isAbsolute())
-                            create(refURI);
-                        else
-                            create(baseURI.resolve(refURI));
-                    }
-                }
-            }
-        }
-
-        return this.schemas.get(uri);
-    }
-
-    @Override
-    public Schema create(Schema parent, String path) {
-        if(path.equals("#")) {
-            return parent;
+  @Override
+  public synchronized Schema create(URI uri) {
+    if (!getByUri(uri).isPresent()) {
+      URI baseUri = UriUtil.removeFragment(uri);
+      JsonNode baseNode = this.contentResolver.resolve(baseUri);
+      if (uri.toString().contains("#") && !uri.toString().endsWith("#")) {
+        Schema newSchema = new Schema(baseUri, baseNode, null, true);
+        this.schemas.put(baseUri, newSchema);
+        JsonNode childContent = this.fragmentResolver.resolve(baseNode, '#' + StringUtils.substringAfter(uri.toString(), "#"));
+        this.schemas.put(uri, new Schema(uri, childContent, newSchema, false));
+      } else {
+        if ( baseNode.has("extends") && baseNode.get("extends").isObject()) {
+          URI ref = URI.create(((ObjectNode)baseNode.get("extends")).get("$ref").asText());
+          URI absoluteUri;
+          if ( ref.isAbsolute()) {
+            absoluteUri = ref;
+          } else {
+            absoluteUri = baseUri.resolve(ref);
+          }
+          JsonNode parentNode = this.contentResolver.resolve(absoluteUri);
+          Schema parentSchema = null;
+          if ( this.schemas.get(absoluteUri) != null ) {
+            parentSchema = this.schemas.get(absoluteUri);
+          } else {
+            parentSchema = create(absoluteUri);
+          }
+          this.schemas.put(uri, new Schema(uri, baseNode, parentSchema, true));
         } else {
-            path = StringUtils.stripEnd(path, "#?&/");
-            URI id = parent != null && parent.getId() != null?parent.getId().resolve(path):URI.create(path);
-            if(this.selfReferenceWithoutParentFile(parent, path)) {
-                this.schemas.put(id, new Schema(id, this.fragmentResolver.resolve(parent.getParentContent(), path), parent, false));
-                return this.schemas.get(id);
+          this.schemas.put(uri, new Schema(uri, baseNode, null, true));
+        }
+      }
+      List<JsonNode> refs = baseNode.findValues("$ref");
+      for ( JsonNode ref : refs ) {
+        if ( ref.isValueNode() ) {
+          String refVal = ref.asText();
+          URI refUri = null;
+          try {
+            refUri = URI.create(refVal);
+          } catch ( Exception ex ) {
+            LOGGER.info("Exception: {}", ex.getMessage());
+          }
+          if (refUri != null && !getByUri(refUri).isPresent()) {
+            if (refUri.isAbsolute()) {
+              create(refUri);
             } else {
-                return this.create(id);
+              create(baseUri.resolve(refUri));
             }
+          }
         }
+      }
     }
 
-    protected boolean selfReferenceWithoutParentFile(Schema parent, String path) {
-        return parent != null && (parent.getId() == null || parent.getId().toString().startsWith("#/")) && path.startsWith("#/");
-    }
+    return this.schemas.get(uri);
+  }
 
-    @Override
-    public synchronized void clearCache() {
-        this.schemas.clear();
+  @Override
+  public Schema create(Schema parent, String path) {
+    if (path.equals("#")) {
+      return parent;
+    } else {
+      path = StringUtils.stripEnd(path, "#?&/");
+      URI id = (parent != null && parent.getId() != null)
+          ? parent.getId().resolve(path)
+          : URI.create(path);
+      if (this.selfReferenceWithoutParentFile(parent, path)) {
+        this.schemas.put(id, new Schema(id, this.fragmentResolver.resolve(parent.getParentContent(), path), parent, false));
+        return this.schemas.get(id);
+      } else {
+        return this.create(id);
+      }
     }
+  }
 
-    @Override
-    public Integer getSize() {
-        return schemas.size();
-    }
+  protected boolean selfReferenceWithoutParentFile(Schema parent, String path) {
+    return parent != null && (parent.getId() == null || parent.getId().toString().startsWith("#/")) && path.startsWith("#/");
+  }
 
-    @Override
-    public Optional<Schema> getById(URI id) {
-        for( Schema schema : schemas.values() ) {
-            if( schema.getId() != null && schema.getId().equals(id) )
-                return Optional.of(schema);
-        }
-        return Optional.absent();
-    }
+  @Override
+  public synchronized void clearCache() {
+    this.schemas.clear();
+  }
 
-    @Override
-    public Optional<Schema> getByUri(URI uri) {
-        for( Schema schema : schemas.values() ) {
-            if( schema.getURI().equals(uri) )
-                return Optional.of(schema);
-        }
-        return Optional.absent();
-    }
+  @Override
+  public Integer getSize() {
+    return schemas.size();
+  }
 
-    @Override
-    public Integer getFileUriCount() {
-        int count = 0;
-        for( Schema schema : schemas.values() ) {
-            if( schema.getURI().getScheme().equals("file") )
-                count++;
-        }
-        return count;
+  @Override
+  public Optional<Schema> getById(URI id) {
+    for ( Schema schema : schemas.values() ) {
+      if ( schema.getId() != null && schema.getId().equals(id) ) {
+        return Optional.of(schema);
+      }
     }
+    return Optional.absent();
+  }
 
-    @Override
-    public Integer getHttpUriCount() {
-        int count = 0;
-        for( Schema schema : schemas.values() ) {
-            if( schema.getURI().getScheme().equals("http") )
-                count++;
-        }
-        return count;
+  @Override
+  public Optional<Schema> getByUri(URI uri) {
+    for ( Schema schema : schemas.values() ) {
+      if ( schema.getUri().equals(uri) ) {
+        return Optional.of(schema);
+      }
     }
+    return Optional.absent();
+  }
 
-    @Override
-    public Iterator<Schema> getSchemaIterator() {
-        List<Schema> schemaList = Lists.newArrayList(schemas.values());
-        Collections.sort(schemaList, this);
-        return schemaList.iterator();
+  @Override
+  public Integer getFileUriCount() {
+    int count = 0;
+    for ( Schema schema : schemas.values() ) {
+      if ( schema.getUri().getScheme().equals("file") ) {
+        count++;
+      }
     }
+    return count;
+  }
 
-    @Override
-    public ObjectNode resolveProperties(Schema schema, ObjectNode fieldNode, String resourceId) {
-        // this should return something more suitable like:
-        //   Map<String, Pair<Schema, ObjectNode>>
-        ObjectNode schemaProperties = NODE_FACTORY.objectNode();
-        ObjectNode parentProperties = NODE_FACTORY.objectNode();
-        if (fieldNode == null) {
-            ObjectNode schemaContent = (ObjectNode) schema.getContent();
-            if( schemaContent.has("properties") ) {
-                schemaProperties = (ObjectNode) schemaContent.get("properties");
-                if (schema.getParentContent() != null) {
-                    ObjectNode parentContent = (ObjectNode) schema.getParentContent();
-                    if (parentContent.has("properties")) {
-                        parentProperties = (ObjectNode) parentContent.get("properties");
-                    }
-                }
-            }
-        } else if (fieldNode != null && fieldNode.size() > 0) {
-            if( fieldNode.has("properties") && fieldNode.get("properties").isObject() && fieldNode.get("properties").size() > 0 )
-                schemaProperties = (ObjectNode) fieldNode.get("properties");
-            URI parentURI = null;
-            if( fieldNode.has("$ref") || fieldNode.has("extends") ) {
-                JsonNode refNode = fieldNode.get("$ref");
-                JsonNode extendsNode = fieldNode.get("extends");
-                if (refNode != null && refNode.isValueNode())
-                    parentURI = URI.create(refNode.asText());
-                else if (extendsNode != null && extendsNode.isObject())
-                    parentURI = URI.create(extendsNode.get("$ref").asText());
-                ObjectNode parentContent = null;
-                URI absoluteURI;
-                if (parentURI.isAbsolute())
-                    absoluteURI = parentURI;
-                else {
-                    absoluteURI = schema.getURI().resolve(parentURI);
-                    if (!absoluteURI.isAbsolute() || (absoluteURI.isAbsolute() && !getByUri(absoluteURI).isPresent() ))
-                        absoluteURI = schema.getParentURI().resolve(parentURI);
-                }
-                if (absoluteURI != null && absoluteURI.isAbsolute()) {
-                    if (getByUri(absoluteURI).isPresent())
-                        parentContent = (ObjectNode) getByUri(absoluteURI).get().getContent();
-                    if (parentContent != null && parentContent.isObject() && parentContent.has("properties")) {
-                        parentProperties = (ObjectNode) parentContent.get("properties");
-                    } else if (absoluteURI.getPath().endsWith("#properties")) {
-                        absoluteURI = URI.create(absoluteURI.toString().replace("#properties", ""));
-                        parentProperties = (ObjectNode) getByUri(absoluteURI).get().getContent().get("properties");
-                    }
-                }
-            }
+  @Override
+  public Integer getHttpUriCount() {
+    int count = 0;
+    for ( Schema schema : schemas.values() ) {
+      if ( schema.getUri().getScheme().equals("http") ) {
+        count++;
+      }
+    }
+    return count;
+  }
 
+  @Override
+  public Iterator<Schema> getSchemaIterator() {
+    List<Schema> schemaList = Lists.newArrayList(schemas.values());
+    Collections.sort(schemaList, this);
+    return schemaList.iterator();
+  }
 
+  @Override
+  public ObjectNode resolveProperties(Schema schema, ObjectNode fieldNode, String resourceId) {
+    // this should return something more suitable like:
+    //   Map<String, Pair<Schema, ObjectNode>>
+    ObjectNode schemaProperties = NODE_FACTORY.objectNode();
+    ObjectNode parentProperties = NODE_FACTORY.objectNode();
+    if (fieldNode == null) {
+      ObjectNode schemaContent = (ObjectNode) schema.getContent();
+      if (schemaContent.has("properties")) {
+        schemaProperties = (ObjectNode) schemaContent.get("properties");
+        if (schema.getParentContent() != null) {
+          ObjectNode parentContent = (ObjectNode) schema.getParentContent();
+          if (parentContent.has("properties")) {
+            parentProperties = (ObjectNode) parentContent.get("properties");
+          }
+        }
+      }
+    } else if (fieldNode != null && fieldNode.size() > 0) {
+      if (fieldNode.has("properties") && fieldNode.get("properties").isObject() && fieldNode.get("properties").size() > 0) {
+        schemaProperties = (ObjectNode) fieldNode.get("properties");
+      }
+      URI parentUri = null;
+      if ( fieldNode.has("$ref") || fieldNode.has("extends") ) {
+        JsonNode refNode = fieldNode.get("$ref");
+        JsonNode extendsNode = fieldNode.get("extends");
+        if (refNode != null && refNode.isValueNode()) {
+          parentUri = URI.create(refNode.asText());
+        } else if (extendsNode != null && extendsNode.isObject()) {
+          parentUri = URI.create(extendsNode.get("$ref").asText());
         }
+        ObjectNode parentContent = null;
+        URI absoluteUri;
+        if (parentUri.isAbsolute()) {
+          absoluteUri = parentUri;
+        } else {
+          absoluteUri = schema.getUri().resolve(parentUri);
+          if (!absoluteUri.isAbsolute() || (absoluteUri.isAbsolute() && !getByUri(absoluteUri).isPresent() )) {
+            absoluteUri = schema.getParentUri().resolve(parentUri);
+          }
+        }
+        if (absoluteUri != null && absoluteUri.isAbsolute()) {
+          if (getByUri(absoluteUri).isPresent()) {
+            parentContent = (ObjectNode) getByUri(absoluteUri).get().getContent();
+          }
+          if (parentContent != null && parentContent.isObject() && parentContent.has("properties")) {
+            parentProperties = (ObjectNode) parentContent.get("properties");
+          } else if (absoluteUri.getPath().endsWith("#properties")) {
+            absoluteUri = URI.create(absoluteUri.toString().replace("#properties", ""));
+            parentProperties = (ObjectNode) getByUri(absoluteUri).get().getContent().get("properties");
+          }
+        }
+      }
 
-        ObjectNode resolvedProperties = NODE_FACTORY.objectNode();
-        if (parentProperties != null && parentProperties.size() > 0)
-            resolvedProperties = SchemaUtil.mergeProperties(schemaProperties, parentProperties);
-        else resolvedProperties = schemaProperties.deepCopy();
 
-        return resolvedProperties;
     }
 
-    public ObjectNode resolveItems(Schema schema, ObjectNode fieldNode, String resourceId) {
-        ObjectNode schemaItems = NODE_FACTORY.objectNode();
-        ObjectNode parentItems = NODE_FACTORY.objectNode();
-        if (fieldNode == null) {
-            ObjectNode schemaContent = (ObjectNode) schema.getContent();
-            if( schemaContent.has("items") ) {
-                schemaItems = (ObjectNode) schemaContent.get("items");
-                if (schema.getParentContent() != null) {
-                    ObjectNode parentContent = (ObjectNode) schema.getParentContent();
-                    if (parentContent.has("items")) {
-                        parentItems = (ObjectNode) parentContent.get("items");
-                    }
-                }
-            }
-        } else if (fieldNode != null && fieldNode.size() > 0) {
-            if (fieldNode.has("items") && fieldNode.get("items").isObject() && fieldNode.get("items").size() > 0)
-                schemaItems = (ObjectNode) fieldNode.get("items");
-            URI parentURI = null;
-            if( fieldNode.has("$ref") || fieldNode.has("extends") ) {
-                JsonNode refNode = fieldNode.get("$ref");
-                JsonNode extendsNode = fieldNode.get("extends");
-                if (refNode != null && refNode.isValueNode())
-                    parentURI = URI.create(refNode.asText());
-                else if (extendsNode != null && extendsNode.isObject())
-                    parentURI = URI.create(extendsNode.get("$ref").asText());
-                ObjectNode parentContent = null;
-                URI absoluteURI;
-                if (parentURI.isAbsolute())
-                    absoluteURI = parentURI;
-                else {
-                    absoluteURI = schema.getURI().resolve(parentURI);
-                    if (!absoluteURI.isAbsolute() || (absoluteURI.isAbsolute() && !getByUri(absoluteURI).isPresent() ))
-                        absoluteURI = schema.getParentURI().resolve(parentURI);
-                }
-                if (absoluteURI != null && absoluteURI.isAbsolute()) {
-                    if (getByUri(absoluteURI).isPresent())
-                        parentContent = (ObjectNode) getByUri(absoluteURI).get().getContent();
-                    if (parentContent != null && parentContent.isObject() && parentContent.has("items")) {
-                        parentItems = (ObjectNode) parentContent.get("items");
-                    } else if (absoluteURI.getPath().endsWith("#items")) {
-                        absoluteURI = URI.create(absoluteURI.toString().replace("#items", ""));
-                        parentItems = (ObjectNode) getByUri(absoluteURI).get().getContent().get("items");
-                    }
-                }
-            }
-        }
-
-        ObjectNode resolvedItems = NODE_FACTORY.objectNode();
-        if (parentItems != null && parentItems.size() > 0)
-            resolvedItems = SchemaUtil.mergeProperties(schemaItems, parentItems);
-        else resolvedItems = schemaItems.deepCopy();
-
-        return resolvedItems;
+    ObjectNode resolvedProperties = NODE_FACTORY.objectNode();
+    if (parentProperties != null && parentProperties.size() > 0) {
+      resolvedProperties = SchemaUtil.mergeProperties(schemaProperties, parentProperties);
+    } else {
+      resolvedProperties = schemaProperties.deepCopy();
     }
 
-    @Override
-    public int compare(Schema left, Schema right) {
-        // are they the same?
-        if( left.equals(right)) return 0;
-        // is one an ancestor of the other
-        Schema candidateAncestor = left;
-        while( candidateAncestor.getParent() != null ) {
-            candidateAncestor = candidateAncestor.getParent();
-            if( candidateAncestor.equals(right))
-                return 1;
+    return resolvedProperties;
+  }
+
+  /**
+   * resolve full definition of 'items'.
+   * @param schema Schema
+   * @param fieldNode ObjectNode
+   * @param resourceId resourceId
+   * @return ObjectNode
+   */
+  public ObjectNode resolveItems(Schema schema, ObjectNode fieldNode, String resourceId) {
+    ObjectNode schemaItems = NODE_FACTORY.objectNode();
+    ObjectNode parentItems = NODE_FACTORY.objectNode();
+    if (fieldNode == null) {
+      ObjectNode schemaContent = (ObjectNode) schema.getContent();
+      if ( schemaContent.has("items") ) {
+        schemaItems = (ObjectNode) schemaContent.get("items");
+        if (schema.getParentContent() != null) {
+          ObjectNode parentContent = (ObjectNode) schema.getParentContent();
+          if (parentContent.has("items")) {
+            parentItems = (ObjectNode) parentContent.get("items");
+          }
         }
-        candidateAncestor = right;
-        while( candidateAncestor.getParent() != null ) {
-            candidateAncestor = candidateAncestor.getParent();
-            if( candidateAncestor.equals(left))
-                return -1;
+      }
+    } else if (fieldNode != null && fieldNode.size() > 0) {
+      if (fieldNode.has("items") && fieldNode.get("items").isObject() && fieldNode.get("items").size() > 0) {
+        schemaItems = (ObjectNode) fieldNode.get("items");
+      }
+      URI parentUri = null;
+      if ( fieldNode.has("$ref") || fieldNode.has("extends") ) {
+        JsonNode refNode = fieldNode.get("$ref");
+        JsonNode extendsNode = fieldNode.get("extends");
+        if (refNode != null && refNode.isValueNode()) {
+          parentUri = URI.create(refNode.asText());
+        } else if (extendsNode != null && extendsNode.isObject()) {
+          parentUri = URI.create(extendsNode.get("$ref").asText());
         }
-        // does one have a field that reference the other?
-        for( JsonNode refNode : left.getContent().findValues("$ref") ) {
-            String refText = refNode.asText();
-            Optional<URI> resolvedURI = safeResolve(left.getURI(), refText);
-            if( resolvedURI.isPresent() && resolvedURI.get().equals(right.getURI()))
-                return 1;
+        ObjectNode parentContent = null;
+        URI absoluteUri;
+        if (parentUri.isAbsolute()) {
+          absoluteUri = parentUri;
+        } else {
+          absoluteUri = schema.getUri().resolve(parentUri);
+          if (!absoluteUri.isAbsolute() || (absoluteUri.isAbsolute() && !getByUri(absoluteUri).isPresent() )) {
+            absoluteUri = schema.getParentUri().resolve(parentUri);
+          }
         }
-        for( JsonNode refNode : right.getContent().findValues("$ref") ) {
-            String refText = refNode.asText();
-            Optional<URI> resolvedURI = safeResolve(right.getURI(), refText);
-            if( resolvedURI.isPresent() && resolvedURI.get().equals(left.getURI()))
-                return -1;
+        if (absoluteUri != null && absoluteUri.isAbsolute()) {
+          if (getByUri(absoluteUri).isPresent()) {
+            parentContent = (ObjectNode) getByUri(absoluteUri).get().getContent();
+          }
+          if (parentContent != null && parentContent.isObject() && parentContent.has("items")) {
+            parentItems = (ObjectNode) parentContent.get("items");
+          } else if (absoluteUri.getPath().endsWith("#items")) {
+            absoluteUri = URI.create(absoluteUri.toString().replace("#items", ""));
+            parentItems = (ObjectNode) getByUri(absoluteUri).get().getContent().get("items");
+          }
         }
-        // does one have a field that reference a third schema that references the other?
-        for( JsonNode refNode : left.getContent().findValues("$ref") ) {
-            String refText = refNode.asText();
-            Optional<URI> possibleConnectorURI = safeResolve(left.getURI(), refText);
-            if( possibleConnectorURI.isPresent()) {
-                Optional<Schema> possibleConnector = getByUri(possibleConnectorURI.get());
-                if (possibleConnector.isPresent()) {
-                    for (JsonNode connectorRefNode : possibleConnector.get().getContent().findValues("$ref")) {
-                        String connectorRefText = connectorRefNode.asText();
-                        Optional<URI> resolvedURI = safeResolve(possibleConnector.get().getURI(), connectorRefText);
-                        if (resolvedURI.isPresent() && resolvedURI.get().equals(right.getURI()))
-                            return 1;
-                    }
-                }
+      }
+    }
+
+    ObjectNode resolvedItems = NODE_FACTORY.objectNode();
+    if (parentItems != null && parentItems.size() > 0) {
+      resolvedItems = SchemaUtil.mergeProperties(schemaItems, parentItems);
+    } else {
+      resolvedItems = schemaItems.deepCopy();
+    }
+
+    return resolvedItems;
+  }
+
+  @Override
+  public int compare(Schema left, Schema right) {
+    // are they the same?
+    if ( left.equals(right)) {
+      return 0;
+    }
+    // is one an ancestor of the other
+    Schema candidateAncestor = left;
+    while ( candidateAncestor.getParent() != null ) {
+      candidateAncestor = candidateAncestor.getParent();
+      if ( candidateAncestor.equals(right)) {
+        return 1;
+      }
+    }
+    candidateAncestor = right;
+    while ( candidateAncestor.getParent() != null ) {
+      candidateAncestor = candidateAncestor.getParent();
+      if ( candidateAncestor.equals(left)) {
+        return -1;
+      }
+    }
+    // does one have a field that reference the other?
+    for ( JsonNode refNode : left.getContent().findValues("$ref") ) {
+      String refText = refNode.asText();
+      Optional<URI> resolvedUri = safeResolve(left.getUri(), refText);
+      if ( resolvedUri.isPresent() && resolvedUri.get().equals(right.getUri())) {
+        return 1;
+      }
+    }
+    for ( JsonNode refNode : right.getContent().findValues("$ref") ) {
+      String refText = refNode.asText();
+      Optional<URI> resolvedUri = safeResolve(right.getUri(), refText);
+      if ( resolvedUri.isPresent() && resolvedUri.get().equals(left.getUri())) {
+        return -1;
+      }
+    }
+    // does one have a field that reference a third schema that references the other?
+    for ( JsonNode refNode : left.getContent().findValues("$ref") ) {
+      String refText = refNode.asText();
+      Optional<URI> possibleConnectorUri = safeResolve(left.getUri(), refText);
+      if ( possibleConnectorUri.isPresent()) {
+        Optional<Schema> possibleConnector = getByUri(possibleConnectorUri.get());
+        if (possibleConnector.isPresent()) {
+          for (JsonNode connectorRefNode : possibleConnector.get().getContent().findValues("$ref")) {
+            String connectorRefText = connectorRefNode.asText();
+            Optional<URI> resolvedUri = safeResolve(possibleConnector.get().getUri(), connectorRefText);
+            if (resolvedUri.isPresent() && resolvedUri.get().equals(right.getUri())) {
+              return 1;
             }
+          }
         }
-        for( JsonNode refNode : right.getContent().findValues("$ref") ) {
-            String refText = refNode.asText();
-            Optional<URI> possibleConnectorURI = safeResolve(right.getURI(), refText);
-            if( possibleConnectorURI.isPresent()) {
-                Optional<Schema> possibleConnector = getByUri(possibleConnectorURI.get());
-                if (possibleConnector.isPresent()) {
-                    for (JsonNode connectorRefNode : possibleConnector.get().getContent().findValues("$ref")) {
-                        String connectorRefText = connectorRefNode.asText();
-                        Optional<URI> resolvedURI = safeResolve(possibleConnector.get().getURI(), connectorRefText);
-                        if (resolvedURI.isPresent() && resolvedURI.get().equals(left.getURI()))
-                            return -1;
-                    }
-                }
+      }
+    }
+    for ( JsonNode refNode : right.getContent().findValues("$ref") ) {
+      String refText = refNode.asText();
+      Optional<URI> possibleConnectorUri = safeResolve(right.getUri(), refText);
+      if ( possibleConnectorUri.isPresent()) {
+        Optional<Schema> possibleConnector = getByUri(possibleConnectorUri.get());
+        if (possibleConnector.isPresent()) {
+          for (JsonNode connectorRefNode : possibleConnector.get().getContent().findValues("$ref")) {
+            String connectorRefText = connectorRefNode.asText();
+            Optional<URI> resolvedUri = safeResolve(possibleConnector.get().getUri(), connectorRefText);
+            if (resolvedUri.isPresent() && resolvedUri.get().equals(left.getUri())) {
+              return -1;
             }
+          }
         }
-        return 0;
+      }
     }
+    return 0;
+  }
 
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/schema/SchemaUtil.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/schema/SchemaUtil.java b/streams-util/src/main/java/org/apache/streams/util/schema/SchemaUtil.java
index 785ec58..e4b7928 100644
--- a/streams-util/src/main/java/org/apache/streams/util/schema/SchemaUtil.java
+++ b/streams-util/src/main/java/org/apache/streams/util/schema/SchemaUtil.java
@@ -15,6 +15,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.util.schema;
 
 import com.fasterxml.jackson.databind.JsonNode;
@@ -34,33 +35,44 @@ import static org.apache.commons.lang3.StringUtils.isEmpty;
  */
 public class SchemaUtil {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(SchemaUtil.class);
-    private static final JsonNodeFactory NODE_FACTORY = JsonNodeFactory.instance;
-    public static final String ILLEGAL_CHARACTER_REGEX = "[^0-9a-zA-Z_$]";
+  private static final Logger LOGGER = LoggerFactory.getLogger(SchemaUtil.class);
+  private static final JsonNodeFactory NODE_FACTORY = JsonNodeFactory.instance;
+  public static final String ILLEGAL_CHARACTER_REGEX = "[^0-9a-zA-Z_$]";
 
-    public static String childQualifiedName(String parentQualifiedName, String childSimpleName) {
-        String safeChildName = childSimpleName.replaceAll(ILLEGAL_CHARACTER_REGEX, "_");
-        return isEmpty(parentQualifiedName) ? safeChildName : parentQualifiedName + "." + safeChildName;
-    }
+  public static String childQualifiedName(String parentQualifiedName, String childSimpleName) {
+    String safeChildName = childSimpleName.replaceAll(ILLEGAL_CHARACTER_REGEX, "_");
+    return isEmpty(parentQualifiedName) ? safeChildName : parentQualifiedName + "." + safeChildName;
+  }
 
-    public static ObjectNode readSchema(URL schemaUrl) {
+  /**
+   * read Schema from URL.
+   * @param schemaUrl URL
+   * @return ObjectNode
+   */
+  public static ObjectNode readSchema(URL schemaUrl) {
 
-        ObjectNode schemaNode = NODE_FACTORY.objectNode();
-        schemaNode.put("$ref", schemaUrl.toString());
-        return schemaNode;
+    ObjectNode schemaNode = NODE_FACTORY.objectNode();
+    schemaNode.put("$ref", schemaUrl.toString());
+    return schemaNode;
 
-    }
+  }
 
-    public static ObjectNode mergeProperties(ObjectNode content, ObjectNode parent) {
+  /**
+   * merge parent and child properties maps.
+   * @param content ObjectNode
+   * @param parent ObjectNode
+   * @return merged ObjectNode
+   */
+  public static ObjectNode mergeProperties(ObjectNode content, ObjectNode parent) {
 
-        ObjectNode merged = parent.deepCopy();
-        Iterator<Map.Entry<String, JsonNode>> fields = content.fields();
-        for( ; fields.hasNext(); ) {
-            Map.Entry<String, JsonNode> field = fields.next();
-            String fieldId = field.getKey();
-            merged.put(fieldId, field.getValue().deepCopy());
-        }
-        return merged;
+    ObjectNode merged = parent.deepCopy();
+    Iterator<Map.Entry<String, JsonNode>> fields = content.fields();
+    for ( ; fields.hasNext(); ) {
+      Map.Entry<String, JsonNode> field = fields.next();
+      String fieldId = field.getKey();
+      merged.put(fieldId, field.getValue().deepCopy());
     }
+    return merged;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/schema/URIUtil.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/schema/URIUtil.java b/streams-util/src/main/java/org/apache/streams/util/schema/URIUtil.java
deleted file mode 100644
index d645675..0000000
--- a/streams-util/src/main/java/org/apache/streams/util/schema/URIUtil.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.streams.util.schema;
-
-import com.google.common.base.Optional;
-import org.apache.commons.lang3.StringUtils;
-
-import java.net.URI;
-
-/**
- * URIUtil contains methods to assist in resolving URIs and URI fragments.
- */
-public class URIUtil {
-
-    public static URI removeFragment(URI id) {
-        return URI.create(StringUtils.substringBefore(id.toString(), "#"));
-    }
-
-    public static URI removeFile(URI id) {
-        return URI.create(StringUtils.substringBeforeLast(id.toString(), "/"));
-    }
-
-    public static Optional<URI> safeResolve(URI absolute, String relativePart) {
-        if( !absolute.isAbsolute()) return Optional.absent();
-        try {
-            return Optional.of(absolute.resolve(relativePart));
-        } catch( IllegalArgumentException e ) {
-            return Optional.absent();
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/schema/UriUtil.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/schema/UriUtil.java b/streams-util/src/main/java/org/apache/streams/util/schema/UriUtil.java
new file mode 100644
index 0000000..33b656d
--- /dev/null
+++ b/streams-util/src/main/java/org/apache/streams/util/schema/UriUtil.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.util.schema;
+
+import com.google.common.base.Optional;
+import org.apache.commons.lang3.StringUtils;
+
+import java.net.URI;
+
+/**
+ * UriUtil contains methods to assist in resolving URIs and URI fragments.
+ */
+public class UriUtil {
+
+  public static URI removeFragment(URI id) {
+    return URI.create(StringUtils.substringBefore(id.toString(), "#"));
+  }
+
+  public static URI removeFile(URI id) {
+    return URI.create(StringUtils.substringBeforeLast(id.toString(), "/"));
+  }
+
+  /**
+   * resolve a remote schema safely.
+   * @param absolute root URI
+   * @param relativePart relative to root
+   * @return URI if resolvable, or Optional.absent()
+   */
+  public static Optional<URI> safeResolve(URI absolute, String relativePart) {
+    if ( !absolute.isAbsolute()) {
+      return Optional.absent();
+    }
+    try {
+      return Optional.of(absolute.resolve(relativePart));
+    } catch ( IllegalArgumentException ex ) {
+      return Optional.absent();
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/BackOffStrategyTest.java
----------------------------------------------------------------------
diff --git a/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/BackOffStrategyTest.java b/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/BackOffStrategyTest.java
index 108813e..00380e6 100644
--- a/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/BackOffStrategyTest.java
+++ b/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/BackOffStrategyTest.java
@@ -24,64 +24,63 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 /**
- * Unit Tets
+ * Unit Test for BackOffStrategy.
  */
 public class BackOffStrategyTest {
 
+  private class TestBackOff extends AbstractBackOffStrategy {
 
-    private class TestBackOff extends AbstractBackOffStrategy {
-
-        public TestBackOff(long sleep, int maxAttempts) {
-            super(sleep, maxAttempts);
-        }
-
-        @Override
-        protected long calculateBackOffTime(int attemptCount, long baseSleepTime) {
-            return baseSleepTime;
-        }
+    public TestBackOff(long sleep, int maxAttempts) {
+      super(sleep, maxAttempts);
     }
 
-    @Test
-    public void testUnlimitedBackOff() {
-        AbstractBackOffStrategy backOff = new TestBackOff(1, -1);
-        try {
-            for(int i=0; i < 100; ++i) {
-                backOff.backOff();
-            }
-        } catch (BackOffException boe) {
-            fail("Threw BackOffException.  Not expected action");
-        }
+    @Override
+    protected long calculateBackOffTime(int attemptCount, long baseSleepTime) {
+      return baseSleepTime;
     }
+  }
 
-    @Test
-    public void testLimitedUseBackOff()  {
-        AbstractBackOffStrategy backOff = new TestBackOff(1, 2);
-        try {
-            backOff.backOff();
-        } catch (BackOffException boe) {
-            fail("Threw BackOffExpection. Not expected action");
-        }
-        try {
-            backOff.backOff();
-        } catch (BackOffException boe) {
-            fail("Threw BackOffExpection. Not expected action");
-        }
-        try {
-            backOff.backOff();
-            fail("Expected BackOffException to be thrown.");
-        } catch (BackOffException boe) {
-
-        }
+  @Test
+  public void testUnlimitedBackOff() {
+    AbstractBackOffStrategy backOff = new TestBackOff(1, -1);
+    try {
+      for (int i = 0; i < 100; ++i) {
+        backOff.backOff();
+      }
+    } catch (BackOffException boe) {
+      fail("Threw BackOffException.  Not expected action");
     }
+  }
 
-    @Test
-    public void testBackOffSleep() throws BackOffException {
-        AbstractBackOffStrategy backOff = new TestBackOff(2000, 1);
-        long startTime = System.currentTimeMillis();
-        backOff.backOff();
-        long endTime = System.currentTimeMillis();
-        assertTrue(endTime - startTime >= 2000);
+  @Test
+  public void testLimitedUseBackOff()  {
+    AbstractBackOffStrategy backOff = new TestBackOff(1, 2);
+    try {
+      backOff.backOff();
+    } catch (BackOffException boe) {
+      fail("Threw BackOffExpection. Not expected action");
+    }
+    try {
+      backOff.backOff();
+    } catch (BackOffException boe) {
+      fail("Threw BackOffExpection. Not expected action");
     }
+    try {
+      backOff.backOff();
+      fail("Expected BackOffException to be thrown.");
+    } catch (BackOffException boe) {
+      //
+    }
+  }
+
+  @Test
+  public void testBackOffSleep() throws BackOffException {
+    AbstractBackOffStrategy backOff = new TestBackOff(2000, 1);
+    long startTime = System.currentTimeMillis();
+    backOff.backOff();
+    long endTime = System.currentTimeMillis();
+    assertTrue(endTime - startTime >= 2000);
+  }
 
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/ConstantTimeBackOffStrategyTest.java
----------------------------------------------------------------------
diff --git a/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/ConstantTimeBackOffStrategyTest.java b/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/ConstantTimeBackOffStrategyTest.java
index 0eedaa0..e10a7e2 100644
--- a/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/ConstantTimeBackOffStrategyTest.java
+++ b/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/ConstantTimeBackOffStrategyTest.java
@@ -18,27 +18,25 @@
 
 package org.apache.streams.util.api.requests.backoff;
 
-import com.carrotsearch.randomizedtesting.RandomizedTest;
 import org.apache.streams.util.api.requests.backoff.impl.ConstantTimeBackOffStrategy;
-import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
+import com.carrotsearch.randomizedtesting.RandomizedTest;
+import org.junit.Test;
 
 /**
- * Unit Tests
+ * Unit Test for BackOffStrategy.
  */
-public class ConstantTimeBackOffStrategyTest extends RandomizedTest{
-
-
-    @Test
-    public void constantTimeBackOffStategy() {
-        AbstractBackOffStrategy backOff = new ConstantTimeBackOffStrategy(1);
-        assertEquals(1, backOff.calculateBackOffTime(1,1));
-        assertEquals(1, backOff.calculateBackOffTime(2,1));
-        assertEquals(1, backOff.calculateBackOffTime(3,1));
-        assertEquals(1, backOff.calculateBackOffTime(4,1));
-        assertEquals(1, backOff.calculateBackOffTime(randomIntBetween(1, Integer.MAX_VALUE),1));
-    }
+public class ConstantTimeBackOffStrategyTest extends RandomizedTest {
+
+  @Test
+  public void constantTimeBackOffStategy() {
+    AbstractBackOffStrategy backOff = new ConstantTimeBackOffStrategy(1);
+    assertEquals(1, backOff.calculateBackOffTime(1,1));
+    assertEquals(1, backOff.calculateBackOffTime(2,1));
+    assertEquals(1, backOff.calculateBackOffTime(3,1));
+    assertEquals(1, backOff.calculateBackOffTime(4,1));
+    assertEquals(1, backOff.calculateBackOffTime(randomIntBetween(1, Integer.MAX_VALUE),1));
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/ExponentialBackOffStrategyTest.java
----------------------------------------------------------------------
diff --git a/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/ExponentialBackOffStrategyTest.java b/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/ExponentialBackOffStrategyTest.java
index d595254..70f25ec 100644
--- a/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/ExponentialBackOffStrategyTest.java
+++ b/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/ExponentialBackOffStrategyTest.java
@@ -19,23 +19,24 @@
 package org.apache.streams.util.api.requests.backoff;
 
 import org.apache.streams.util.api.requests.backoff.impl.ExponentialBackOffStrategy;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
 /**
- * Unit Tests
+ * Unit Test for ExponentialBackOffStrategy.
  */
 public class ExponentialBackOffStrategyTest {
 
-    @Test
-    public void exponentialTimeBackOffStrategyTest() {
-        AbstractBackOffStrategy backOff = new ExponentialBackOffStrategy(1);
-        assertEquals(5000, backOff.calculateBackOffTime(1,5));
-        assertEquals(25000, backOff.calculateBackOffTime(2,5));
-        assertEquals(125000, backOff.calculateBackOffTime(3,5));
-        assertEquals(2000, backOff.calculateBackOffTime(1,2));
-        assertEquals(16000, backOff.calculateBackOffTime(4,2));
-    }
+  @Test
+  public void exponentialTimeBackOffStrategyTest() {
+    AbstractBackOffStrategy backOff = new ExponentialBackOffStrategy(1);
+    assertEquals(5000, backOff.calculateBackOffTime(1,5));
+    assertEquals(25000, backOff.calculateBackOffTime(2,5));
+    assertEquals(125000, backOff.calculateBackOffTime(3,5));
+    assertEquals(2000, backOff.calculateBackOffTime(1,2));
+    assertEquals(16000, backOff.calculateBackOffTime(4,2));
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/LinearTimeBackOffStartegyTest.java
----------------------------------------------------------------------
diff --git a/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/LinearTimeBackOffStartegyTest.java b/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/LinearTimeBackOffStartegyTest.java
deleted file mode 100644
index 8b3f384..0000000
--- a/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/LinearTimeBackOffStartegyTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.util.api.requests.backoff;
-
-import org.apache.streams.util.api.requests.backoff.impl.LinearTimeBackOffStrategy;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-
-/**
- * Unit Tests
- */
-public class LinearTimeBackOffStartegyTest {
-
-    @Test
-    public void linearTimeBackOffStrategyTest() {
-        AbstractBackOffStrategy backOff = new LinearTimeBackOffStrategy(1);
-        assertEquals(1000, backOff.calculateBackOffTime(1,1));
-        assertEquals(2000, backOff.calculateBackOffTime(2,1));
-        assertEquals(3000, backOff.calculateBackOffTime(3,1));
-        assertEquals(4000, backOff.calculateBackOffTime(4,1));
-        assertEquals(25000, backOff.calculateBackOffTime(5,5));
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/LinearTimeBackOffStrategyTest.java
----------------------------------------------------------------------
diff --git a/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/LinearTimeBackOffStrategyTest.java b/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/LinearTimeBackOffStrategyTest.java
new file mode 100644
index 0000000..9477b64
--- /dev/null
+++ b/streams-util/src/test/java/org/apache/streams/util/api/requests/backoff/LinearTimeBackOffStrategyTest.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.util.api.requests.backoff;
+
+import org.apache.streams.util.api.requests.backoff.impl.LinearTimeBackOffStrategy;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Unit Test for LinearTimeBackOffStrategy.
+ */
+public class LinearTimeBackOffStrategyTest {
+
+  @Test
+  public void linearTimeBackOffStrategyTest() {
+    AbstractBackOffStrategy backOff = new LinearTimeBackOffStrategy(1);
+    assertEquals(1000, backOff.calculateBackOffTime(1,1));
+    assertEquals(2000, backOff.calculateBackOffTime(2,1));
+    assertEquals(3000, backOff.calculateBackOffTime(3,1));
+    assertEquals(4000, backOff.calculateBackOffTime(4,1));
+    assertEquals(25000, backOff.calculateBackOffTime(5,5));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/test/java/org/apache/streams/util/files/StreamsScannerUtil.java
----------------------------------------------------------------------
diff --git a/streams-util/src/test/java/org/apache/streams/util/files/StreamsScannerUtil.java b/streams-util/src/test/java/org/apache/streams/util/files/StreamsScannerUtil.java
index 576cef0..97aafd7 100644
--- a/streams-util/src/test/java/org/apache/streams/util/files/StreamsScannerUtil.java
+++ b/streams-util/src/test/java/org/apache/streams/util/files/StreamsScannerUtil.java
@@ -18,7 +18,6 @@
 
 package org.apache.streams.util.files;
 
-import java.io.File;
 import java.io.InputStream;
 import java.util.Scanner;
 import java.util.regex.Pattern;
@@ -28,12 +27,17 @@ import java.util.regex.Pattern;
  */
 public class StreamsScannerUtil {
 
-    protected static Pattern newLinePattern = Pattern.compile("(\\r\\n?|\\n)", Pattern.MULTILINE);
+  protected static Pattern newLinePattern = Pattern.compile("(\\r\\n?|\\n)", Pattern.MULTILINE);
 
-    public static Scanner getInstance(String resourcePath) {
+  /**
+   * get instance of Scanner using resource path.
+   * @param resourcePath resourcePath
+   * @return Scanner
+   */
+  public static Scanner getInstance(String resourcePath) {
 
-        InputStream testFileStream = StreamsScannerUtil.class.getResourceAsStream(resourcePath);
-        return new Scanner(testFileStream, "UTF-8").useDelimiter(newLinePattern);
+    InputStream testFileStream = StreamsScannerUtil.class.getResourceAsStream(resourcePath);
+    return new Scanner(testFileStream, "UTF-8").useDelimiter(newLinePattern);
 
-    };
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/test/java/org/apache/streams/util/oauth/tokens/tokenmanager/TestBasicTokenManager.java
----------------------------------------------------------------------
diff --git a/streams-util/src/test/java/org/apache/streams/util/oauth/tokens/tokenmanager/TestBasicTokenManager.java b/streams-util/src/test/java/org/apache/streams/util/oauth/tokens/tokenmanager/TestBasicTokenManager.java
index c08b68b..b799fce 100644
--- a/streams-util/src/test/java/org/apache/streams/util/oauth/tokens/tokenmanager/TestBasicTokenManager.java
+++ b/streams-util/src/test/java/org/apache/streams/util/oauth/tokens/tokenmanager/TestBasicTokenManager.java
@@ -19,7 +19,8 @@
 package org.apache.streams.util.oauth.tokens.tokenmanager;
 
 import org.apache.streams.util.oauth.tokens.AbstractOauthToken;
-import org.apache.streams.util.oauth.tokens.tokenmanager.impl.BasicTokenManger;
+import org.apache.streams.util.oauth.tokens.tokenmanager.impl.BasicTokenManager;
+
 import org.junit.Test;
 
 import java.util.ArrayList;
@@ -29,165 +30,171 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
- * Unit tests for BasticTokenManager
+ * Unit tests for BasicTokenManager.
  */
 public class TestBasicTokenManager {
 
-    /**
-     * Simple token for testing purposes
-     */
-    private class TestToken extends AbstractOauthToken {
-
-        private String s;
+  /**
+   * Simple token for testing purposes.
+   */
+  private class TestToken extends AbstractOauthToken {
 
-        public TestToken(String s) {
-            this.s = s;
-        }
+    private String token;
 
-        @Override
-        protected boolean internalEquals(Object o) {
-            if(!(o instanceof TestToken))
-                return false;
-            TestToken that = (TestToken) o;
-            return this.s.equals(that.s);
-        }
+    public TestToken(String token) {
+      this.token = token;
     }
 
-    @Test
-    public void testNoArgConstructor() {
-        try {
-            BasicTokenManger manager = new BasicTokenManger<TestToken>();
-            assertEquals(0, manager.numAvailableTokens());
-        } catch (Throwable t) {
-            fail("Constructors threw error: "+t.getMessage());
-        }
+    @Override
+    protected boolean internalEquals(Object otherToken) {
+      if (!(otherToken instanceof TestToken)) {
+        return false;
+      }
+      TestToken that = (TestToken) otherToken;
+      return this.token.equals(that.token);
     }
-
-    @Test
-    public void testCollectionConstructor() {
-        List<TestToken> tokens = new LinkedList<TestToken>();
-        try {
-            BasicTokenManger manager1 = new BasicTokenManger<TestToken>(tokens);
-            tokens.add(new TestToken("a"));
-            tokens.add(new TestToken("b"));
-            assertEquals(0, manager1.numAvailableTokens());
-            BasicTokenManger manager2 = new BasicTokenManger<TestToken>(tokens);
-            assertEquals(2, manager2.numAvailableTokens());
-            assertEquals(0, manager1.numAvailableTokens());
-        } catch (Throwable t) {
-            fail("Constructors threw error: "+t.getMessage());
-        }
+  }
+
+  @Test
+  public void testNoArgConstructor() {
+    try {
+      BasicTokenManager manager = new BasicTokenManager<TestToken>();
+      assertEquals(0, manager.numAvailableTokens());
+    } catch (Throwable throwable) {
+      fail("Constructors threw error: " + throwable.getMessage());
     }
-
-    @Test
-    public void testAddTokenToPool() {
-        BasicTokenManger<TestToken> manager = new BasicTokenManger<TestToken>();
-        assertTrue(manager.addTokenToPool(new TestToken("a")));
-        assertEquals(1, manager.numAvailableTokens());
-        assertFalse(manager.addTokenToPool(new TestToken("a")));
-        assertEquals(1, manager.numAvailableTokens());
-        assertTrue(manager.addTokenToPool(new TestToken("b")));
-        assertEquals(2, manager.numAvailableTokens());
+  }
+
+  @Test
+  public void testCollectionConstructor() {
+    List<TestToken> tokens = new LinkedList<TestToken>();
+    try {
+      BasicTokenManager manager1 = new BasicTokenManager<TestToken>(tokens);
+      tokens.add(new TestToken("a"));
+      tokens.add(new TestToken("b"));
+      assertEquals(0, manager1.numAvailableTokens());
+      BasicTokenManager manager2 = new BasicTokenManager<TestToken>(tokens);
+      assertEquals(2, manager2.numAvailableTokens());
+      assertEquals(0, manager1.numAvailableTokens());
+    } catch (Throwable throwable) {
+      fail("Constructors threw error: " + throwable.getMessage());
     }
-
-    @Test
-    public void testAddAllTokensToPool() {
-        BasicTokenManger<TestToken> manager = new BasicTokenManger<TestToken>();
-        List<TestToken> tokens = new ArrayList<TestToken>();
-        tokens.add(new TestToken("a"));
-        tokens.add(new TestToken("b"));
-        tokens.add(new TestToken("c"));
-        assertTrue(manager.addAllTokensToPool(tokens));
-        assertEquals(3, manager.numAvailableTokens());
-        assertFalse(manager.addAllTokensToPool(tokens));
-        assertEquals(3, manager.numAvailableTokens());
-        tokens.add(new TestToken("d"));
-        assertTrue(manager.addAllTokensToPool(tokens));
-        assertEquals(4, manager.numAvailableTokens());
+  }
+
+  @Test
+  public void testAddTokenToPool() {
+    BasicTokenManager<TestToken> manager = new BasicTokenManager<TestToken>();
+    assertTrue(manager.addTokenToPool(new TestToken("a")));
+    assertEquals(1, manager.numAvailableTokens());
+    assertFalse(manager.addTokenToPool(new TestToken("a")));
+    assertEquals(1, manager.numAvailableTokens());
+    assertTrue(manager.addTokenToPool(new TestToken("b")));
+    assertEquals(2, manager.numAvailableTokens());
+  }
+
+  @Test
+  public void testAddAllTokensToPool() {
+    List<TestToken> tokens = new ArrayList<TestToken>();
+    tokens.add(new TestToken("a"));
+    tokens.add(new TestToken("b"));
+    tokens.add(new TestToken("c"));
+    BasicTokenManager<TestToken> manager = new BasicTokenManager<TestToken>();
+    assertTrue(manager.addAllTokensToPool(tokens));
+    assertEquals(3, manager.numAvailableTokens());
+    assertFalse(manager.addAllTokensToPool(tokens));
+    assertEquals(3, manager.numAvailableTokens());
+    tokens.add(new TestToken("d"));
+    assertTrue(manager.addAllTokensToPool(tokens));
+    assertEquals(4, manager.numAvailableTokens());
+  }
+
+  @Test
+  public void testGetNextAvailableToken() {
+    BasicTokenManager manager = new BasicTokenManager<TestToken>();
+    assertNull(manager.getNextAvailableToken());
+    TestToken tokenA = new TestToken("a");
+    assertTrue(manager.addTokenToPool(tokenA));
+    assertEquals(tokenA, manager.getNextAvailableToken());
+    assertEquals(tokenA, manager.getNextAvailableToken());
+    assertEquals(tokenA, manager.getNextAvailableToken());
+
+    TestToken tokenB = new TestToken("b");
+    TestToken tokenC = new TestToken("c");
+    assertTrue(manager.addTokenToPool(tokenB));
+    assertTrue(manager.addTokenToPool(tokenC));
+    assertEquals(tokenA, manager.getNextAvailableToken());
+    assertEquals(tokenB, manager.getNextAvailableToken());
+    assertEquals(tokenC, manager.getNextAvailableToken());
+    assertEquals(tokenA, manager.getNextAvailableToken());
+    assertEquals(tokenB, manager.getNextAvailableToken());
+    assertEquals(tokenC, manager.getNextAvailableToken());
+  }
+
+  @Test
+  public void testMultiThreadSafety() {
+    int numThreads = 10;
+    ExecutorService executor = Executors.newFixedThreadPool(numThreads);
+    CountDownLatch startLatch = new CountDownLatch(1);
+    CountDownLatch finishLatch = new CountDownLatch(numThreads);
+    BasicTokenManager<TestToken> manager = new BasicTokenManager<TestToken>();
+    for (int i = 0; i < numThreads; ++i) {
+      assertTrue(manager.addTokenToPool(new TestToken(String.valueOf(i))));
     }
-
-    @Test
-    public void testGetNextAvailableToken() {
-        BasicTokenManger manager = new BasicTokenManger<TestToken>();
-        assertNull(manager.getNextAvailableToken());
-        TestToken tokenA = new TestToken("a");
-        assertTrue(manager.addTokenToPool(tokenA));
-        assertEquals(tokenA, manager.getNextAvailableToken());
-        assertEquals(tokenA, manager.getNextAvailableToken());
-        assertEquals(tokenA, manager.getNextAvailableToken());
-
-        TestToken tokenB = new TestToken("b");
-        TestToken tokenC = new TestToken("c");
-        assertTrue(manager.addTokenToPool(tokenB));
-        assertTrue(manager.addTokenToPool(tokenC));
-        assertEquals(tokenA, manager.getNextAvailableToken());
-        assertEquals(tokenB, manager.getNextAvailableToken());
-        assertEquals(tokenC, manager.getNextAvailableToken());
-        assertEquals(tokenA, manager.getNextAvailableToken());
-        assertEquals(tokenB, manager.getNextAvailableToken());
-        assertEquals(tokenC, manager.getNextAvailableToken());
+    for (int i = 0; i < numThreads; ++i) {
+      executor.submit(new TestThread(manager, startLatch, finishLatch, numThreads));
     }
-
-    @Test
-    public void testMultiThreadSafety() {
-        int numThreads = 10;
-        ExecutorService executor = Executors.newFixedThreadPool(numThreads);
-        CountDownLatch startLatch = new CountDownLatch(1);
-        CountDownLatch finishLatch = new CountDownLatch(numThreads);
-        BasicTokenManger<TestToken> manager = new BasicTokenManger<TestToken>();
-        for(int i=0; i < numThreads; ++i) {
-            assertTrue(manager.addTokenToPool(new TestToken(String.valueOf(i))));
-        }
-        for(int i=0; i < numThreads; ++i) {
-            executor.submit(new TestThread(manager, startLatch, finishLatch, numThreads));
-        }
-        try {
-            Thread.sleep(2000); //sleep for 2 seconds so other threads can initialize
-            startLatch.countDown();
-            finishLatch.await();
-            assertTrue("No errors were thrown during thead safe check", true);
-        } catch (InterruptedException ie) {
-            Thread.currentThread().interrupt();
-        } catch (Throwable t) {
-            fail("Error occured durring thread safe test : "+t.getMessage());
-        }
+    try {
+      Thread.sleep(2000); //sleep for 2 seconds so other threads can initialize
+      startLatch.countDown();
+      finishLatch.await();
+      assertTrue("No errors were thrown during thead safe check", true);
+    } catch (InterruptedException ie) {
+      Thread.currentThread().interrupt();
+    } catch (Throwable throwable) {
+      fail("Error occured durring thread safe test : " + throwable.getMessage());
+    }
+  }
+
+  /**
+   * Test class for thread safe check.
+   */
+  private class TestThread implements Runnable {
+
+    private BasicTokenManager<TestToken> manager;
+    private CountDownLatch startLatch;
+    private CountDownLatch finishedLatch;
+    private int availableTokens;
+
+    public TestThread(BasicTokenManager<TestToken> manager, CountDownLatch startLatch, CountDownLatch finishedLatch, int availableTokens) {
+      this.manager = manager;
+      this.startLatch = startLatch;
+      this.finishedLatch = finishedLatch;
+      this.availableTokens = availableTokens;
     }
 
-    /**
-     * Test class for thread safe check.
-     */
-    private class TestThread implements Runnable {
-
-        private BasicTokenManger<TestToken> manager;
-        private CountDownLatch startLatch;
-        private CountDownLatch finishedLatch;
-        private int availableTokens;
-
-        public TestThread(BasicTokenManger<TestToken> manager, CountDownLatch startLatch, CountDownLatch finishedLatch, int availableTokens) {
-            this.manager = manager;
-            this.startLatch = startLatch;
-            this.finishedLatch = finishedLatch;
-            this.availableTokens = availableTokens;
-        }
-
-        @Override
-        public void run() {
-            try {
-                this.startLatch.await();
-                for(int i=0; i < 1000; ++i) {
-                    assertNotNull(this.manager.getNextAvailableToken());
-                    assertEquals(this.availableTokens, this.manager.numAvailableTokens());
-                }
-                this.finishedLatch.countDown();
-            } catch (InterruptedException ie) {
-                Thread.currentThread().interrupt();
-            } catch (Throwable t) {
-                fail("Threw error in multithread test : "+t.getMessage());
-            }
+    @Override
+    public void run() {
+      try {
+        this.startLatch.await();
+        for (int i = 0; i < 1000; ++i) {
+          assertNotNull(this.manager.getNextAvailableToken());
+          assertEquals(this.availableTokens, this.manager.numAvailableTokens());
         }
+        this.finishedLatch.countDown();
+      } catch (InterruptedException ie) {
+        Thread.currentThread().interrupt();
+      } catch (Throwable throwable) {
+        fail("Threw error in multithread test : " + throwable.getMessage());
+      }
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/test/java/org/apache/streams/util/schema/test/SchemaOrderingTest.java
----------------------------------------------------------------------
diff --git a/streams-util/src/test/java/org/apache/streams/util/schema/test/SchemaOrderingTest.java b/streams-util/src/test/java/org/apache/streams/util/schema/test/SchemaOrderingTest.java
index 1a72ff6..c155b67 100644
--- a/streams-util/src/test/java/org/apache/streams/util/schema/test/SchemaOrderingTest.java
+++ b/streams-util/src/test/java/org/apache/streams/util/schema/test/SchemaOrderingTest.java
@@ -15,15 +15,17 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.util.schema.test;
 
+import org.apache.streams.util.schema.Schema;
+import org.apache.streams.util.schema.SchemaStore;
+import org.apache.streams.util.schema.SchemaStoreImpl;
+
 import com.google.common.base.Optional;
 import com.google.common.base.Predicate;
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
-import org.apache.streams.util.schema.Schema;
-import org.apache.streams.util.schema.SchemaStore;
-import org.apache.streams.util.schema.SchemaStoreImpl;
 import org.junit.Test;
 
 import java.io.File;
@@ -35,133 +37,137 @@ import java.util.List;
  */
 public class SchemaOrderingTest {
 
-    @Test
-    public void compareVerbParent() {
-        SchemaStore schemaStore = new SchemaStoreImpl();
-        File update = new File("target/test-classes/activitystreams-schemas/verbs/update.json");
-        schemaStore.create(update.toURI());
-        File activity = new File("target/test-classes/activitystreams-schemas/activity.json");
-        schemaStore.create(activity.toURI());
-        assert( schemaStore.compare( schemaStore.getByUri(update.toURI()).get(), schemaStore.getByUri(activity.toURI()).get()) == 1);
-        Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator();
-        assertContainsItemsEndingWithInOrder(
-                schemaIterator,
-                Lists.newArrayList(
-                        "activity.json",
-                        "update.json"
-                )
-        );
+  @Test
+  public void compareVerbParent() {
+    SchemaStore schemaStore = new SchemaStoreImpl();
+    File update = new File("target/test-classes/activitystreams-schemas/verbs/update.json");
+    schemaStore.create(update.toURI());
+    File activity = new File("target/test-classes/activitystreams-schemas/activity.json");
+    schemaStore.create(activity.toURI());
+    assert ( schemaStore.compare( schemaStore.getByUri(update.toURI()).get(), schemaStore.getByUri(activity.toURI()).get()) == 1);
+    Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator();
+    assertContainsItemsEndingWithInOrder(
+        schemaIterator,
+        Lists.newArrayList(
+            "activity.json",
+            "update.json"
+        )
+    );
+  }
+
+  @Test
+  public void compareObjectTypeParent() {
+    SchemaStore schemaStore = new SchemaStoreImpl();
+    File alert = new File("target/test-classes/activitystreams-schemas/objectTypes/alert.json");
+    schemaStore.create(alert.toURI());
+    File object = new File("target/test-classes/activitystreams-schemas/object.json");
+    schemaStore.create(object.toURI());
+    assert ( schemaStore.compare( schemaStore.getByUri(object.toURI()).get(), schemaStore.getByUri(alert.toURI()).get()) == -1);
+    Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator();
+    assertContainsItemsEndingWithInOrder(
+        schemaIterator,
+        Lists.newArrayList(
+            "object.json",
+            "alert.json"
+        )
+    );
+  }
+
+  @Test
+  public void compareUnrelated() {
+    SchemaStore schemaStore = new SchemaStoreImpl();
+    File alert = new File("target/test-classes/activitystreams-schemas/objectTypes/alert.json");
+    schemaStore.create(alert.toURI());
+    File update = new File("target/test-classes/activitystreams-schemas/verbs/update.json");
+    schemaStore.create(update.toURI());
+    assert ( schemaStore.compare( schemaStore.getByUri(alert.toURI()).get(), schemaStore.getByUri(update.toURI()).get()) == 0);
+  }
+
+  @Test
+  public void compareVerbFieldRef() {
+    SchemaStore schemaStore = new SchemaStoreImpl();
+    File update = new File("target/test-classes/activitystreams-schemas/verbs/update.json");
+    schemaStore.create(update.toURI());
+    File object = new File("target/test-classes/activitystreams-schemas/object.json");
+    schemaStore.create(object.toURI());
+    assert ( schemaStore.compare( schemaStore.getByUri(update.toURI()).get(), schemaStore.getByUri(object.toURI()).get()) == 1);
+    Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator();
+    assertContainsItemsEndingWithInOrder(
+        schemaIterator,
+        Lists.newArrayList(
+            "object.json",
+            "update.json"
+        )
+    );
+  }
+
+  @Test
+  public void compareObjectTypeFieldRef() {
+    SchemaStore schemaStore = new SchemaStoreImpl();
+    File alert = new File("target/test-classes/activitystreams-schemas/objectTypes/alert.json");
+    schemaStore.create(alert.toURI());
+    File mediaLink = new File("target/test-classes/activitystreams-schemas/media_link.json");
+    schemaStore.create(mediaLink.toURI());
+    assert ( schemaStore.compare( schemaStore.getByUri(mediaLink.toURI()).get(), schemaStore.getByUri(alert.toURI()).get()) == -1);
+    Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator();
+    assertContainsItemsEndingWithInOrder(
+        schemaIterator,
+        Lists.newArrayList(
+            "media_link.json",
+            "object.json",
+            "alert.json"
+        )
+    );
+  }
+
+  @Test
+  public void compareVerbAncestorIndirect() {
+    SchemaStore schemaStore = new SchemaStoreImpl();
+    File update = new File("target/test-classes/activitystreams-schemas/verbs/update.json");
+    schemaStore.create(update.toURI());
+    File mediaLink = new File("target/test-classes/activitystreams-schemas/media_link.json");
+    schemaStore.create(mediaLink.toURI());
+    assert ( schemaStore.getByUri(mediaLink.toURI()).isPresent());
+    assert ( schemaStore.getByUri(update.toURI()).isPresent());
+    assert ( schemaStore.compare( schemaStore.getByUri(mediaLink.toURI()).get(), schemaStore.getByUri(update.toURI()).get()) == -1);
+    Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator();
+    assertContainsItemsEndingWithInOrder(
+        schemaIterator,
+        Lists.newArrayList(
+            "media_link.json",
+            "update.json"
+        )
+    );
+  }
+
+  /**
+   * assert iterator of Schema contains URI items ending with in order.
+   * @param iterator Iterator of Schema
+   * @param items List of String
+   */
+  public void assertContainsItemsEndingWithInOrder(Iterator<Schema> iterator, List<String> items) {
+    for ( String item : items ) {
+      Optional<Schema> tryFind = Iterators.tryFind( iterator, new SchemaUriEndsWithPredicate(item) );
+      assert ( tryFind.isPresent() );
     }
+  }
 
-    @Test
-    public void compareObjectTypeParent() {
-        SchemaStore schemaStore = new SchemaStoreImpl();
-        File alert = new File("target/test-classes/activitystreams-schemas/objectTypes/alert.json");
-        schemaStore.create(alert.toURI());
-        File object = new File("target/test-classes/activitystreams-schemas/object.json");
-        schemaStore.create(object.toURI());
-        assert( schemaStore.compare( schemaStore.getByUri(object.toURI()).get(), schemaStore.getByUri(alert.toURI()).get()) == -1);
-        Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator();
-        assertContainsItemsEndingWithInOrder(
-                schemaIterator,
-                Lists.newArrayList(
-                        "object.json",
-                        "alert.json"
-                )
-        );
-    }
+  public class SchemaUriEndsWithPredicate implements Predicate<Schema> {
 
-    @Test
-    public void compareUnrelated() {
-        SchemaStore schemaStore = new SchemaStoreImpl();
-        File alert = new File("target/test-classes/activitystreams-schemas/objectTypes/alert.json");
-        schemaStore.create(alert.toURI());
-        File update = new File("target/test-classes/activitystreams-schemas/verbs/update.json");
-        schemaStore.create(update.toURI());
-        assert( schemaStore.compare( schemaStore.getByUri(alert.toURI()).get(), schemaStore.getByUri(update.toURI()).get()) == 0);
-    }
+    private String endsWith;
 
-    @Test
-    public void compareVerbFieldRef() {
-        SchemaStore schemaStore = new SchemaStoreImpl();
-        File update = new File("target/test-classes/activitystreams-schemas/verbs/update.json");
-        schemaStore.create(update.toURI());
-        File object = new File("target/test-classes/activitystreams-schemas/object.json");
-        schemaStore.create(object.toURI());
-        assert( schemaStore.compare( schemaStore.getByUri(update.toURI()).get(), schemaStore.getByUri(object.toURI()).get()) == 1);
-        Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator();
-        assertContainsItemsEndingWithInOrder(
-                schemaIterator,
-                Lists.newArrayList(
-                        "object.json",
-                        "update.json"
-                )
-        );
+    public SchemaUriEndsWithPredicate(String endsWith) {
+      this.endsWith = endsWith;
     }
 
-    @Test
-    public void compareObjectTypeFieldRef() {
-        SchemaStore schemaStore = new SchemaStoreImpl();
-        File alert = new File("target/test-classes/activitystreams-schemas/objectTypes/alert.json");
-        schemaStore.create(alert.toURI());
-        File media_link = new File("target/test-classes/activitystreams-schemas/media_link.json");
-        schemaStore.create(media_link.toURI());
-        assert( schemaStore.compare( schemaStore.getByUri(media_link.toURI()).get(), schemaStore.getByUri(alert.toURI()).get()) == -1);
-        Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator();
-        assertContainsItemsEndingWithInOrder(
-                schemaIterator,
-                Lists.newArrayList(
-                        "media_link.json",
-                        "object.json",
-                        "alert.json"
-                )
-        );
+    @Override
+    public boolean apply(Schema input) {
+      return input.getUri().getPath().endsWith(endsWith);
     }
 
-    @Test
-    public void compareVerbAncestorIndirect() {
-        SchemaStore schemaStore = new SchemaStoreImpl();
-        File update = new File("target/test-classes/activitystreams-schemas/verbs/update.json");
-        schemaStore.create(update.toURI());
-        File media_link = new File("target/test-classes/activitystreams-schemas/media_link.json");
-        schemaStore.create(media_link.toURI());
-        assert( schemaStore.getByUri(media_link.toURI()).isPresent());
-        assert( schemaStore.getByUri(update.toURI()).isPresent());
-        assert( schemaStore.compare( schemaStore.getByUri(media_link.toURI()).get(), schemaStore.getByUri(update.toURI()).get()) == -1);
-        Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator();
-        assertContainsItemsEndingWithInOrder(
-                schemaIterator,
-                Lists.newArrayList(
-                        "media_link.json",
-                        "update.json"
-                )
-        );
-    }
-
-
-    public void assertContainsItemsEndingWithInOrder(Iterator<Schema> iterator, List<String> items) {
-        for( String item : items ) {
-            Optional<Schema> tryFind = Iterators.tryFind( iterator, new SchemaUriEndsWithPredicate(item) );
-            assert( tryFind.isPresent() );
-        }
-    }
-
-    public class SchemaUriEndsWithPredicate implements Predicate<Schema> {
-
-        private String endsWith;
-
-        public SchemaUriEndsWithPredicate(String endsWith) {
-            this.endsWith = endsWith;
-        }
-
-        @Override
-        public boolean apply(Schema input) {
-            return input.getURI().getPath().endsWith(endsWith);
-        }
-
-        @Override
-        public boolean equals(Object object) {
-            return false;
-        }
+    @Override
+    public boolean equals(Object object) {
+      return false;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/test/java/org/apache/streams/util/schema/test/SchemaStoreTest.java
----------------------------------------------------------------------
diff --git a/streams-util/src/test/java/org/apache/streams/util/schema/test/SchemaStoreTest.java b/streams-util/src/test/java/org/apache/streams/util/schema/test/SchemaStoreTest.java
index 1dce654..6dad615 100644
--- a/streams-util/src/test/java/org/apache/streams/util/schema/test/SchemaStoreTest.java
+++ b/streams-util/src/test/java/org/apache/streams/util/schema/test/SchemaStoreTest.java
@@ -15,71 +15,72 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.util.schema.test;
 
 import org.apache.streams.util.schema.Schema;
 import org.apache.streams.util.schema.SchemaStore;
 import org.apache.streams.util.schema.SchemaStoreImpl;
+
 import org.junit.Test;
 
 import java.io.File;
-import java.net.URI;
 
 /**
  * Created by sblackmon on 5/2/16.
  */
 public class SchemaStoreTest {
 
-    @Test
-    public void indexMediaLink() {
-        SchemaStore schemaStore = new SchemaStoreImpl();
-        File file = new File("target/test-classes/activitystreams-schemas/media_link.json");
-        schemaStore.create(file.toURI());
-        assert( schemaStore.getFileUriCount() == 1);
-        assert( schemaStore.getByUri(file.toURI()).isPresent());
-        assert( schemaStore.getById(schemaStore.getByUri(file.toURI()).get().getId()).isPresent());
-    }
+  @Test
+  public void indexMediaLink() {
+    SchemaStore schemaStore = new SchemaStoreImpl();
+    File file = new File("target/test-classes/activitystreams-schemas/media_link.json");
+    schemaStore.create(file.toURI());
+    assert ( schemaStore.getFileUriCount() == 1);
+    assert ( schemaStore.getByUri(file.toURI()).isPresent());
+    assert ( schemaStore.getById(schemaStore.getByUri(file.toURI()).get().getId()).isPresent());
+  }
 
-    @Test
-    public void indexApprove() {
-        SchemaStore schemaStore = new SchemaStoreImpl();
-        File file = new File("target/test-classes/activitystreams-schemas/verbs/approve.json");
-        schemaStore.create(file.toURI());
-        assert( schemaStore.getFileUriCount() == 4);
-        assert( schemaStore.getByUri(file.toURI()).isPresent());
-        assert( schemaStore.getById(schemaStore.getByUri(file.toURI()).get().getId()).isPresent());
-    }
+  @Test
+  public void indexApprove() {
+    SchemaStore schemaStore = new SchemaStoreImpl();
+    File file = new File("target/test-classes/activitystreams-schemas/verbs/approve.json");
+    schemaStore.create(file.toURI());
+    assert ( schemaStore.getFileUriCount() == 4);
+    assert ( schemaStore.getByUri(file.toURI()).isPresent());
+    assert ( schemaStore.getById(schemaStore.getByUri(file.toURI()).get().getId()).isPresent());
+  }
 
-    @Test
-    public void indexCollection() {
-        SchemaStore schemaStore = new SchemaStoreImpl();
-        File file = new File("target/test-classes/activitystreams-schemas/collection.json");
-        schemaStore.create(file.toURI());
-        assert( schemaStore.getFileUriCount() == 3);
-        assert( schemaStore.getByUri(file.toURI()).isPresent());
-        assert( schemaStore.getById(schemaStore.getByUri(file.toURI()).get().getId()).isPresent());
-        Schema collection = schemaStore.getByUri(file.toURI()).get();
-        assert( collection.getParent() == null );
-    }
+  @Test
+  public void indexCollection() {
+    SchemaStore schemaStore = new SchemaStoreImpl();
+    File file = new File("target/test-classes/activitystreams-schemas/collection.json");
+    schemaStore.create(file.toURI());
+    assert ( schemaStore.getFileUriCount() == 3);
+    assert ( schemaStore.getByUri(file.toURI()).isPresent());
+    assert ( schemaStore.getById(schemaStore.getByUri(file.toURI()).get().getId()).isPresent());
+    Schema collection = schemaStore.getByUri(file.toURI()).get();
+    assert ( collection.getParent() == null );
+  }
 
-    @Test
-    public void indexUpdate() {
-        SchemaStore schemaStore = new SchemaStoreImpl();
-        File file = new File("target/test-classes/activitystreams-schemas/verbs/update.json");
-        schemaStore.create(file.toURI());
-        assert( schemaStore.getFileUriCount() == 4);
-        assert( schemaStore.getByUri(file.toURI()).isPresent());
-        assert( schemaStore.getById(schemaStore.getByUri(file.toURI()).get().getId()).isPresent());
-        Schema update = schemaStore.getByUri(file.toURI()).get();
-        assert( update.getParent() != null );
-        File parentFile = new File("target/test-classes/activitystreams-schemas/activity.json");
-        Schema parent = schemaStore.getByUri(parentFile.toURI()).get();
-        assert( parent != null );
-        assert( update.getParentURI().equals(parent.getURI()));
-    }
+  @Test
+  public void indexUpdate() {
+    SchemaStore schemaStore = new SchemaStoreImpl();
+    File file = new File("target/test-classes/activitystreams-schemas/verbs/update.json");
+    schemaStore.create(file.toURI());
+    assert ( schemaStore.getFileUriCount() == 4);
+    assert ( schemaStore.getByUri(file.toURI()).isPresent());
+    assert ( schemaStore.getById(schemaStore.getByUri(file.toURI()).get().getId()).isPresent());
+    Schema update = schemaStore.getByUri(file.toURI()).get();
+    assert ( update.getParent() != null );
+    File parentFile = new File("target/test-classes/activitystreams-schemas/activity.json");
+    Schema parent = schemaStore.getByUri(parentFile.toURI()).get();
+    assert ( parent != null );
+    assert ( update.getParentUri().equals(parent.getUri()));
+  }
 
-    // test create from messed up URI
+  // test create from messed up URI
 
-    // test create from URI with messed up reference
+  // test create from URI with messed up reference
 
 }



[40/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-http/src/main/java/org/apache/streams/components/http/processor/SimpleHTTPGetProcessor.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-http/src/main/java/org/apache/streams/components/http/processor/SimpleHTTPGetProcessor.java b/streams-components/streams-http/src/main/java/org/apache/streams/components/http/processor/SimpleHTTPGetProcessor.java
index 5868ba6..871a08b 100644
--- a/streams-components/streams-http/src/main/java/org/apache/streams/components/http/processor/SimpleHTTPGetProcessor.java
+++ b/streams-components/streams-http/src/main/java/org/apache/streams/components/http/processor/SimpleHTTPGetProcessor.java
@@ -18,10 +18,20 @@
 
 package org.apache.streams.components.http.processor;
 
+import org.apache.streams.components.http.HttpProcessorConfiguration;
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProcessor;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.pojo.extensions.ExtensionUtil;
+import org.apache.streams.pojo.json.ActivityObject;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.base.Strings;
+
 import org.apache.commons.codec.binary.Base64;
 import org.apache.http.HttpEntity;
 import org.apache.http.client.methods.CloseableHttpResponse;
@@ -30,14 +40,6 @@ import org.apache.http.client.utils.URIBuilder;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.http.util.EntityUtils;
-import org.apache.streams.components.http.HttpProcessorConfiguration;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.extensions.ExtensionUtil;
-import org.apache.streams.pojo.json.ActivityObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -50,230 +52,249 @@ import java.util.List;
 import java.util.Map;
 
 /**
- * Processor retrieves contents from an known url and stores the resulting object in an extension field
+ * Processor retrieves contents from a known url and stores the resulting object in an extension field.
  */
 public class SimpleHTTPGetProcessor implements StreamsProcessor {
 
-    private final static String STREAMS_ID = "SimpleHTTPGetProcessor";
+  private static final String STREAMS_ID = "SimpleHTTPGetProcessor";
 
-    // from root config id
-    private final static String EXTENSION = "account_type";
+  // from root config id
+  private static final String EXTENSION = "account_type";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(SimpleHTTPGetProcessor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleHTTPGetProcessor.class);
 
-    protected ObjectMapper mapper;
+  protected ObjectMapper mapper;
 
-    protected URIBuilder uriBuilder;
+  protected URIBuilder uriBuilder;
 
-    protected CloseableHttpClient httpclient;
+  protected CloseableHttpClient httpclient;
 
-    protected HttpProcessorConfiguration configuration;
+  protected HttpProcessorConfiguration configuration;
 
-    protected String authHeader;
-    public SimpleHTTPGetProcessor() {
-        this(new ComponentConfigurator<>(HttpProcessorConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("http")));
-    }
+  protected String authHeader;
 
-    public SimpleHTTPGetProcessor(HttpProcessorConfiguration processorConfiguration) {
-        LOGGER.info("creating SimpleHTTPGetProcessor");
-        LOGGER.info(processorConfiguration.toString());
-        this.configuration = processorConfiguration;
-    }
+  /**
+   * SimpleHTTPGetProcessor constructor - resolves HttpProcessorConfiguration from JVM 'http'.
+   */
+  public SimpleHTTPGetProcessor() {
+    this(new ComponentConfigurator<>(HttpProcessorConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("http")));
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  /**
+   * SimpleHTTPGetProcessor constructor - uses provided HttpProcessorConfiguration.
+   */
+  public SimpleHTTPGetProcessor(HttpProcessorConfiguration processorConfiguration) {
+    LOGGER.info("creating SimpleHTTPGetProcessor");
+    LOGGER.info(processorConfiguration.toString());
+    this.configuration = processorConfiguration;
+  }
 
-    /**
-     Override this to store a result other than exact json representation of response
-     */
-    protected ObjectNode prepareExtensionFragment(String entityString) {
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        try {
-            return mapper.readValue(entityString, ObjectNode.class);
-        } catch (IOException e) {
-            LOGGER.warn(e.getMessage());
-            return null;
-        }
-    }
-
-    /**
-     Override this to place result in non-standard location on document
-     */
-    protected ObjectNode getRootDocument(StreamsDatum datum) {
-
-        try {
-            String json = datum.getDocument() instanceof String ?
-                    (String) datum.getDocument() :
-                    mapper.writeValueAsString(datum.getDocument());
-            return mapper.readValue(json, ObjectNode.class);
-        } catch (JsonProcessingException e) {
-            LOGGER.warn(e.getMessage());
-            return null;
-        } catch (IOException e) {
-            LOGGER.warn(e.getMessage());
-            return null;
-        }
+  /**
+   Override this to store a result other than exact json representation of response.
+   */
+  protected ObjectNode prepareExtensionFragment(String entityString) {
 
+    try {
+      return mapper.readValue(entityString, ObjectNode.class);
+    } catch (IOException ex) {
+      LOGGER.warn(ex.getMessage());
+      return null;
+    }
+  }
+
+  /**
+   Override this to place result in non-standard location on document.
+   */
+  protected ObjectNode getRootDocument(StreamsDatum datum) {
+
+    try {
+      String json = datum.getDocument() instanceof String
+          ? (String) datum.getDocument()
+          : mapper.writeValueAsString(datum.getDocument());
+      return mapper.readValue(json, ObjectNode.class);
+    } catch (JsonProcessingException ex) {
+      LOGGER.warn(ex.getMessage());
+      return null;
+    } catch (IOException ex) {
+      LOGGER.warn(ex.getMessage());
+      return null;
     }
 
-    /**
-     Override this to place result in non-standard location on document
-     */
-    protected ActivityObject getEntityToExtend(ObjectNode rootDocument) {
+  }
 
-        if( this.configuration.getEntity().equals(HttpProcessorConfiguration.Entity.ACTIVITY))
-            return mapper.convertValue(rootDocument, ActivityObject.class);
-        else
-            return mapper.convertValue(rootDocument.get(this.configuration.getEntity().toString()), ActivityObject.class);
+  /**
+   Override this to place result in non-standard location on document.
+   */
+  protected ActivityObject getEntityToExtend(ObjectNode rootDocument) {
 
+    if ( this.configuration.getEntity().equals(HttpProcessorConfiguration.Entity.ACTIVITY)) {
+      return mapper.convertValue(rootDocument, ActivityObject.class);
+    } else {
+      return mapper.convertValue(rootDocument.get(this.configuration.getEntity().toString()), ActivityObject.class);
     }
+  }
 
-    /**
-     Override this to place result in non-standard location on document
-     */
-    protected ObjectNode setEntityToExtend(ObjectNode rootDocument, ActivityObject activityObject) {
+  /**
+   Override this to place result in non-standard location on document.
+   */
+  protected ObjectNode setEntityToExtend(ObjectNode rootDocument, ActivityObject activityObject) {
 
-        if( this.configuration.getEntity().equals(HttpProcessorConfiguration.Entity.ACTIVITY))
-            return mapper.convertValue(activityObject, ObjectNode.class);
-        else
-            rootDocument.set(this.configuration.getEntity().toString(), mapper.convertValue(activityObject, ObjectNode.class));
+    if ( this.configuration.getEntity().equals(HttpProcessorConfiguration.Entity.ACTIVITY)) {
+      return mapper.convertValue(activityObject, ObjectNode.class);
+    } else {
+      rootDocument.set(this.configuration.getEntity().toString(), mapper.convertValue(activityObject, ObjectNode.class));
+    }
 
-        return rootDocument;
+    return rootDocument;
 
-    }
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
 
-        List<StreamsDatum> result = new ArrayList<>();
+    List<StreamsDatum> result = new ArrayList<>();
 
-        ObjectNode rootDocument = getRootDocument(entry);
+    ObjectNode rootDocument = getRootDocument(entry);
 
-        Map<String, String> params = prepareParams(entry);
+    Map<String, String> params = prepareParams(entry);
 
-        URI uri = prepareURI(params);
+    URI uri = prepareURI(params);
 
-        HttpGet httpget = prepareHttpGet(uri);
+    HttpGet httpget = prepareHttpGet(uri);
 
-        CloseableHttpResponse response = null;
+    CloseableHttpResponse response = null;
 
-        String entityString = null;
-        try {
-            response = httpclient.execute(httpget);
-            HttpEntity entity = response.getEntity();
-            // TODO: handle retry
-            if (response.getStatusLine().getStatusCode() == 200 && entity != null) {
-                entityString = EntityUtils.toString(entity);
-            }
-        } catch (IOException e) {
-            LOGGER.error("IO error:\n{}\n{}\n{}", uri.toString(), response, e.getMessage());
-            return result;
-        } finally {
-            try {
-                if (response != null) {
-                    response.close();
-                }
-            } catch (IOException ignored) {}
+    String entityString = null;
+    try {
+      response = httpclient.execute(httpget);
+      HttpEntity entity = response.getEntity();
+      // TODO: handle retry
+      if (response.getStatusLine().getStatusCode() == 200 && entity != null) {
+        entityString = EntityUtils.toString(entity);
+      }
+    } catch (IOException ex) {
+      LOGGER.error("IO error:\n{}\n{}\n{}", uri.toString(), response, ex.getMessage());
+      return result;
+    } finally {
+      try {
+        if (response != null) {
+          response.close();
         }
+      } catch (IOException ignored) {
+        LOGGER.trace("IOException", ignored);
+      }
+    }
 
-        if( entityString == null )
-            return result;
-
-        LOGGER.debug(entityString);
+    if ( entityString == null ) {
+      return result;
+    }
 
-        ObjectNode extensionFragment = prepareExtensionFragment(entityString);
+    LOGGER.debug(entityString);
 
-        ActivityObject extensionEntity = getEntityToExtend(rootDocument);
+    ObjectNode extensionFragment = prepareExtensionFragment(entityString);
 
-        ExtensionUtil.getInstance().addExtension(extensionEntity, this.configuration.getExtension(), extensionFragment);
+    ActivityObject extensionEntity = getEntityToExtend(rootDocument);
 
-        rootDocument = setEntityToExtend(rootDocument, extensionEntity);
+    ExtensionUtil.getInstance().addExtension(extensionEntity, this.configuration.getExtension(), extensionFragment);
 
-        entry.setDocument(rootDocument);
+    rootDocument = setEntityToExtend(rootDocument, extensionEntity);
 
-        result.add(entry);
+    entry.setDocument(rootDocument);
 
-        return result;
+    result.add(entry);
 
-    }
+    return result;
 
-    /**
-     Override this to alter request URI
-     */
-    protected URI prepareURI(Map<String, String> params) {
+  }
 
-        URI uri = null;
-        for( Map.Entry<String,String> param : params.entrySet()) {
-            uriBuilder = uriBuilder.setParameter(param.getKey(), param.getValue());
-        }
-        try {
-            uri = uriBuilder.build();
-        } catch (URISyntaxException e) {
-            LOGGER.error("URI error {}", uriBuilder.toString());
-        }
-        return uri;
-    }
+  /**
+   Override this to alter request URI.
+   */
+  protected URI prepareURI(Map<String, String> params) {
 
-    /**
-     Override this to add parameters to the request
-     */
-    protected Map<String, String> prepareParams(StreamsDatum entry) {
-        return new HashMap<>();
+    URI uri = null;
+    for ( Map.Entry<String,String> param : params.entrySet()) {
+      uriBuilder = uriBuilder.setParameter(param.getKey(), param.getValue());
     }
-
-    /**
-     Override this to set a payload on the request
-     */
-    protected ObjectNode preparePayload(StreamsDatum entry) {
-        return null;
+    try {
+      uri = uriBuilder.build();
+    } catch (URISyntaxException ex) {
+      LOGGER.error("URI error {}", uriBuilder.toString());
     }
-
-    public HttpGet prepareHttpGet(URI uri) {
-        HttpGet httpget = new HttpGet(uri);
-        httpget.addHeader("content-type", this.configuration.getContentType());
-        if( !Strings.isNullOrEmpty(authHeader))
-            httpget.addHeader("Authorization", String.format("Basic %s", authHeader));
-        return httpget;
+    return uri;
+  }
+
+  /**
+   Override this to add parameters to the request.
+   */
+  protected Map<String, String> prepareParams(StreamsDatum entry) {
+    return new HashMap<>();
+  }
+
+  /**
+   Override this to set a payload on the request.
+   */
+  protected ObjectNode preparePayload(StreamsDatum entry) {
+    return null;
+  }
+
+  /**
+   * Override this to set the URI for the request or modify headers.
+   * @param uri uri
+   * @return result
+   */
+  public HttpGet prepareHttpGet(URI uri) {
+    HttpGet httpget = new HttpGet(uri);
+    httpget.addHeader("content-type", this.configuration.getContentType());
+    if ( !Strings.isNullOrEmpty(authHeader)) {
+      httpget.addHeader("Authorization", String.format("Basic %s", authHeader));
     }
+    return httpget;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public void prepare(Object configurationObject) {
 
-        mapper = StreamsJacksonMapper.getInstance();
+    mapper = StreamsJacksonMapper.getInstance();
 
-        uriBuilder = new URIBuilder()
-            .setScheme(this.configuration.getProtocol())
-            .setHost(this.configuration.getHostname())
-            .setPath(this.configuration.getResourcePath());
+    uriBuilder = new URIBuilder()
+        .setScheme(this.configuration.getProtocol())
+        .setHost(this.configuration.getHostname())
+        .setPath(this.configuration.getResourcePath());
 
-        if( !Strings.isNullOrEmpty(configuration.getAccessToken()) )
-            uriBuilder = uriBuilder.addParameter("access_token", configuration.getAccessToken());
-        if( !Strings.isNullOrEmpty(configuration.getUsername())
-            && !Strings.isNullOrEmpty(configuration.getPassword())) {
-            String string = configuration.getUsername() + ":" + configuration.getPassword();
-            authHeader = Base64.encodeBase64String(string.getBytes());
-        }
-        httpclient = HttpClients.createDefault();
+    if ( !Strings.isNullOrEmpty(configuration.getAccessToken()) ) {
+      uriBuilder = uriBuilder.addParameter("access_token", configuration.getAccessToken());
     }
-
-    @Override
-    public void cleanUp() {
-        LOGGER.info("shutting down SimpleHTTPGetProcessor");
-        try {
-            httpclient.close();
-        } catch (IOException e) {
-            e.printStackTrace();
-        } finally {
-            try {
-                httpclient.close();
-            } catch (IOException e) {
-                e.printStackTrace();
-            } finally {
-                httpclient = null;
-            }
-        }
+    if ( !Strings.isNullOrEmpty(configuration.getUsername())
+         &&
+         !Strings.isNullOrEmpty(configuration.getPassword())) {
+      String string = configuration.getUsername() + ":" + configuration.getPassword();
+      authHeader = Base64.encodeBase64String(string.getBytes());
+    }
+    httpclient = HttpClients.createDefault();
+  }
+
+  @Override
+  public void cleanUp() {
+    LOGGER.info("shutting down SimpleHTTPGetProcessor");
+    try {
+      httpclient.close();
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } finally {
+      try {
+        httpclient.close();
+      } catch (IOException e2) {
+        e2.printStackTrace();
+      } finally {
+        httpclient = null;
+      }
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-http/src/main/java/org/apache/streams/components/http/processor/SimpleHTTPPostProcessor.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-http/src/main/java/org/apache/streams/components/http/processor/SimpleHTTPPostProcessor.java b/streams-components/streams-http/src/main/java/org/apache/streams/components/http/processor/SimpleHTTPPostProcessor.java
index f6089f6..1d52b5c 100644
--- a/streams-components/streams-http/src/main/java/org/apache/streams/components/http/processor/SimpleHTTPPostProcessor.java
+++ b/streams-components/streams-http/src/main/java/org/apache/streams/components/http/processor/SimpleHTTPPostProcessor.java
@@ -52,225 +52,241 @@ import java.util.List;
 import java.util.Map;
 
 /**
- * Processor retrieves contents from an known url and stores the resulting object in an extension field
+ * Processor retrieves contents from a known url and stores the resulting object in an extension field.
  */
 public class SimpleHTTPPostProcessor implements StreamsProcessor {
 
-    private final static String STREAMS_ID = "SimpleHTTPPostProcessor";
+  private static final String STREAMS_ID = "SimpleHTTPPostProcessor";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(SimpleHTTPPostProcessor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleHTTPPostProcessor.class);
 
-    protected ObjectMapper mapper;
+  protected ObjectMapper mapper;
 
-    protected URIBuilder uriBuilder;
+  protected URIBuilder uriBuilder;
 
-    protected CloseableHttpClient httpclient;
+  protected CloseableHttpClient httpclient;
 
-    protected HttpProcessorConfiguration configuration;
+  protected HttpProcessorConfiguration configuration;
 
-    protected String authHeader;
+  protected String authHeader;
 
-    public SimpleHTTPPostProcessor() {
-        this(new ComponentConfigurator<>(HttpProcessorConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("http")));
-    }
+  /**
+   * SimpleHTTPPostProcessor constructor - resolves HttpProcessorConfiguration from JVM 'http'.
+   */
+  public SimpleHTTPPostProcessor() {
+    this(new ComponentConfigurator<>(HttpProcessorConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("http")));
+  }
 
-    public SimpleHTTPPostProcessor(HttpProcessorConfiguration processorConfiguration) {
-        LOGGER.info("creating SimpleHTTPPostProcessor");
-        LOGGER.info(processorConfiguration.toString());
-        this.configuration = processorConfiguration;
-    }
+  /**
+   * SimpleHTTPPostProcessor constructor - uses provided HttpProcessorConfiguration.
+   */
+  public SimpleHTTPPostProcessor(HttpProcessorConfiguration processorConfiguration) {
+    LOGGER.info("creating SimpleHTTPPostProcessor");
+    LOGGER.info(processorConfiguration.toString());
+    this.configuration = processorConfiguration;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    /**
-     Override this to store a result other than exact json representation of response
-     */
-    protected ObjectNode prepareExtensionFragment(String entityString) {
+  /**
+   Override this to store a result other than exact json representation of response.
+   */
+  protected ObjectNode prepareExtensionFragment(String entityString) {
 
-        try {
-            return mapper.readValue(entityString, ObjectNode.class);
-        } catch (IOException e) {
-            LOGGER.warn("IOException", e);
-            return null;
-        }
+    try {
+      return mapper.readValue(entityString, ObjectNode.class);
+    } catch (IOException ex) {
+      LOGGER.warn("IOException", ex);
+      return null;
     }
-
-    /**
-     Override this to place result in non-standard location on document
-     */
-    protected ObjectNode getRootDocument(StreamsDatum datum) {
-
-        try {
-            String json = datum.getDocument() instanceof String ?
-                    (String) datum.getDocument() :
-                    mapper.writeValueAsString(datum.getDocument());
-            return mapper.readValue(json, ObjectNode.class);
-        } catch (JsonProcessingException e) {
-            LOGGER.warn("JsonProcessingException", e);
-            return null;
-        } catch (IOException e) {
-            LOGGER.warn("IOException", e);
-            return null;
-        }
-
+  }
+
+  /**
+   Override this to place result in non-standard location on document.
+   */
+  protected ObjectNode getRootDocument(StreamsDatum datum) {
+
+    try {
+      String json = datum.getDocument() instanceof String
+          ? (String) datum.getDocument()
+          : mapper.writeValueAsString(datum.getDocument());
+      return mapper.readValue(json, ObjectNode.class);
+    } catch (JsonProcessingException ex) {
+      LOGGER.warn("JsonProcessingException", ex);
+      return null;
+    } catch (IOException ex) {
+      LOGGER.warn("IOException", ex);
+      return null;
     }
 
-    /**
-     Override this to place result in non-standard location on document
-     */
-    protected ActivityObject getEntityToExtend(ObjectNode rootDocument) {
+  }
 
-        if( this.configuration.getEntity().equals(HttpProcessorConfiguration.Entity.ACTIVITY))
-            return mapper.convertValue(rootDocument, ActivityObject.class);
-        else
-            return mapper.convertValue(rootDocument.get(this.configuration.getEntity().toString()), ActivityObject.class);
+  /**
+   Override this to place result in non-standard location on document.
+   */
+  protected ActivityObject getEntityToExtend(ObjectNode rootDocument) {
 
+    if ( this.configuration.getEntity().equals(HttpProcessorConfiguration.Entity.ACTIVITY)) {
+      return mapper.convertValue(rootDocument, ActivityObject.class);
+    } else {
+      return mapper.convertValue(rootDocument.get(this.configuration.getEntity().toString()), ActivityObject.class);
     }
+  }
 
-    /**
-     Override this to place result in non-standard location on document
-     */
-    protected ObjectNode setEntityToExtend(ObjectNode rootDocument, ActivityObject activityObject) {
-
-        if( this.configuration.getEntity().equals(HttpProcessorConfiguration.Entity.ACTIVITY))
-            return mapper.convertValue(activityObject, ObjectNode.class);
-        else
-            rootDocument.set(this.configuration.getEntity().toString(), mapper.convertValue(activityObject, ObjectNode.class));
-
-        return rootDocument;
+  /**
+   Override this to place result in non-standard location on document.
+   */
+  protected ObjectNode setEntityToExtend(ObjectNode rootDocument, ActivityObject activityObject) {
 
+    if ( this.configuration.getEntity().equals(HttpProcessorConfiguration.Entity.ACTIVITY)) {
+      return mapper.convertValue(activityObject, ObjectNode.class);
+    } else {
+      rootDocument.set(this.configuration.getEntity().toString(), mapper.convertValue(activityObject, ObjectNode.class));
     }
+    return rootDocument;
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
+  }
 
-        List<StreamsDatum> result = new ArrayList<>();
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
 
-        ObjectNode rootDocument = getRootDocument(entry);
+    List<StreamsDatum> result = new ArrayList<>();
 
-        Map<String, String> params = prepareParams(entry);
+    ObjectNode rootDocument = getRootDocument(entry);
 
-        URI uri;
-        for( Map.Entry<String,String> param : params.entrySet()) {
-            uriBuilder = uriBuilder.setParameter(param.getKey(), param.getValue());
-        }
-        try {
-            uri = uriBuilder.build();
-        } catch (URISyntaxException e) {
-            LOGGER.error("URI error {}", uriBuilder.toString(), e);
-            return result;
-        }
+    Map<String, String> params = prepareParams(entry);
 
-        HttpEntity payload = preparePayload(entry);
-
-        HttpPost httpPost = prepareHttpPost(uri, payload);
-
-        CloseableHttpResponse response = null;
-
-        String entityString = null;
-        try {
-            response = httpclient.execute(httpPost);
-            HttpEntity entity = response.getEntity();
-            // TODO: handle retry
-            if (response.getStatusLine().getStatusCode() == 200 && entity != null) {
-                entityString = EntityUtils.toString(entity);
-            }
-        } catch (IOException e) {
-            LOGGER.error("IO error:\n{}\n{}\n{}", uri.toString(), response, e);
-            return result;
-        } finally {
-            try {
-                if (response != null) {
-                    response.close();
-                }
-            } catch (IOException ignored) {}
-        }
+    URI uri;
+    for ( Map.Entry<String,String> param : params.entrySet() ) {
+      uriBuilder = uriBuilder.setParameter(param.getKey(), param.getValue());
+    }
+    try {
+      uri = uriBuilder.build();
+    } catch (URISyntaxException ex) {
+      LOGGER.error("URI error {}", uriBuilder.toString(), ex);
+      return result;
+    }
 
-        if( entityString == null )
-            return result;
+    HttpEntity payload = preparePayload(entry);
+
+    HttpPost httpPost = prepareHttpPost(uri, payload);
+
+    CloseableHttpResponse response = null;
+
+    String entityString = null;
+    try {
+      response = httpclient.execute(httpPost);
+      HttpEntity entity = response.getEntity();
+      // TODO: handle retry
+      if (response.getStatusLine().getStatusCode() == 200 && entity != null) {
+        entityString = EntityUtils.toString(entity);
+      }
+    } catch (IOException ex) {
+      LOGGER.error("IO error:\n{}\n{}\n{}", uri.toString(), response, ex);
+      return result;
+    } finally {
+      try {
+        if (response != null) {
+          response.close();
+        }
+      } catch (IOException ignored) {
+        LOGGER.trace("IOException", ignored);
+      }
+    }
 
-        LOGGER.debug(entityString);
+    if ( entityString == null ) {
+      return result;
+    }
 
-        ObjectNode extensionFragment = prepareExtensionFragment(entityString);
+    LOGGER.debug(entityString);
 
-        ActivityObject extensionEntity = getEntityToExtend(rootDocument);
+    ObjectNode extensionFragment = prepareExtensionFragment(entityString);
 
-        ExtensionUtil.getInstance().addExtension(extensionEntity, this.configuration.getExtension(), extensionFragment);
+    ActivityObject extensionEntity = getEntityToExtend(rootDocument);
 
-        rootDocument = setEntityToExtend(rootDocument, extensionEntity);
+    ExtensionUtil.getInstance().addExtension(extensionEntity, this.configuration.getExtension(), extensionFragment);
 
-        entry.setDocument(rootDocument);
+    rootDocument = setEntityToExtend(rootDocument, extensionEntity);
 
-        result.add(entry);
+    entry.setDocument(rootDocument);
 
-        return result;
+    result.add(entry);
 
-    }
+    return result;
 
-    /**
-     Override this to add parameters to the request
-     */
-    protected Map<String, String> prepareParams(StreamsDatum entry) {
-        return new HashMap<>();
-    }
+  }
 
-    /**
-     Override this to add parameters to the request
-     */
-    protected HttpEntity preparePayload(StreamsDatum entry) {
-        return new StringEntity("{}",
-                ContentType.create("application/json"));
-    }
+  /**
+   Override this to add parameters to the request.
+   */
+  protected Map<String, String> prepareParams(StreamsDatum entry) {
+    return new HashMap<>();
+  }
 
+  /**
+   Override this to add parameters to the request.
+   */
+  protected HttpEntity preparePayload(StreamsDatum entry) {
+    return new StringEntity("{}",
+        ContentType.create("application/json"));
+  }
 
-    public HttpPost prepareHttpPost(URI uri, HttpEntity entity) {
-        HttpPost httpPost = new HttpPost(uri);
-        httpPost.addHeader("content-type", this.configuration.getContentType());
-        if( !Strings.isNullOrEmpty(authHeader))
-            httpPost.addHeader("Authorization", String.format("Basic %s", authHeader));
-        httpPost.setEntity(entity);
-        return httpPost;
+  /**
+   * Override this to set the URI / entity for the request or modify headers.
+   * @param uri uri
+   * @param entity entity
+   * @return result
+   */
+  public HttpPost prepareHttpPost(URI uri, HttpEntity entity) {
+    HttpPost httpPost = new HttpPost(uri);
+    httpPost.addHeader("content-type", this.configuration.getContentType());
+    if ( !Strings.isNullOrEmpty(authHeader)) {
+      httpPost.addHeader("Authorization", String.format("Basic %s", authHeader));
     }
+    httpPost.setEntity(entity);
+    return httpPost;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public void prepare(Object configurationObject) {
 
-        mapper = StreamsJacksonMapper.getInstance();
+    mapper = StreamsJacksonMapper.getInstance();
 
-        uriBuilder = new URIBuilder()
-            .setScheme(this.configuration.getProtocol())
-            .setHost(this.configuration.getHostname())
-            .setPath(this.configuration.getResourcePath());
+    uriBuilder = new URIBuilder()
+        .setScheme(this.configuration.getProtocol())
+        .setHost(this.configuration.getHostname())
+        .setPath(this.configuration.getResourcePath());
 
-        if( !Strings.isNullOrEmpty(configuration.getAccessToken()) )
-            uriBuilder = uriBuilder.addParameter("access_token", configuration.getAccessToken());
-        if( !Strings.isNullOrEmpty(configuration.getUsername())
-            && !Strings.isNullOrEmpty(configuration.getPassword())) {
-            String string = configuration.getUsername() + ":" + configuration.getPassword();
-            authHeader = Base64.encodeBase64String(string.getBytes());
-        }
-        httpclient = HttpClients.createDefault();
+    if ( !Strings.isNullOrEmpty(configuration.getAccessToken()) ) {
+      uriBuilder = uriBuilder.addParameter("access_token", configuration.getAccessToken());
     }
-
-    @Override
-    public void cleanUp() {
-        LOGGER.info("shutting down SimpleHTTPPostProcessor");
-        try {
-            httpclient.close();
-        } catch (IOException e) {
-            e.printStackTrace();
-        } finally {
-            try {
-                httpclient.close();
-            } catch (IOException e) {
-                LOGGER.error("IOException", e);
-            } finally {
-                httpclient = null;
-            }
-        }
+    if ( !Strings.isNullOrEmpty(configuration.getUsername())
+        && !Strings.isNullOrEmpty(configuration.getPassword())) {
+      String string = configuration.getUsername() + ":" + configuration.getPassword();
+      authHeader = Base64.encodeBase64String(string.getBytes());
+    }
+    httpclient = HttpClients.createDefault();
+  }
+
+  @Override
+  public void cleanUp() {
+    LOGGER.info("shutting down SimpleHTTPPostProcessor");
+    try {
+      httpclient.close();
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } finally {
+      try {
+        httpclient.close();
+      } catch (IOException e2) {
+        LOGGER.error("IOException", e2);
+      } finally {
+        httpclient = null;
+      }
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-http/src/main/java/org/apache/streams/components/http/provider/SimpleHttpProvider.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-http/src/main/java/org/apache/streams/components/http/provider/SimpleHttpProvider.java b/streams-components/streams-http/src/main/java/org/apache/streams/components/http/provider/SimpleHttpProvider.java
index 2078647..ab11a68 100644
--- a/streams-components/streams-http/src/main/java/org/apache/streams/components/http/provider/SimpleHttpProvider.java
+++ b/streams-components/streams-http/src/main/java/org/apache/streams/components/http/provider/SimpleHttpProvider.java
@@ -69,269 +69,287 @@ import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 /**
- * Provider retrieves contents from an known set of urls and passes all resulting objects downstream
+ * Provider retrieves contents from a known set of URLs and passes all resulting objects downstream.
  */
 public class SimpleHttpProvider implements StreamsProvider {
 
-    private final static String STREAMS_ID = "SimpleHttpProvider";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(SimpleHttpProvider.class);
-
-    protected ObjectMapper mapper;
-
-    protected URIBuilder uriBuilder;
-
-    protected CloseableHttpClient httpclient;
+  private static final String STREAMS_ID = "SimpleHttpProvider";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleHttpProvider.class);
+
+  protected ObjectMapper mapper;
+
+  protected URIBuilder uriBuilder;
+
+  protected CloseableHttpClient httpclient;
+
+  protected HttpProviderConfiguration configuration;
+
+  protected volatile Queue<StreamsDatum> providerQueue = new ConcurrentLinkedQueue<>();
+
+  protected final ReadWriteLock lock = new ReentrantReadWriteLock();
+
+  private ExecutorService executor;
+
+  /**
+   * SimpleHttpProvider constructor - resolves HttpProviderConfiguration from JVM 'http'.
+   */
+  public SimpleHttpProvider() {
+    this(new ComponentConfigurator<>(HttpProviderConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("http")));
+  }
+
+  /**
+   * SimpleHttpProvider constructor - uses provided HttpProviderConfiguration.
+   */
+  public SimpleHttpProvider(HttpProviderConfiguration providerConfiguration) {
+    LOGGER.info("creating SimpleHttpProvider");
+    LOGGER.info(providerConfiguration.toString());
+    this.configuration = providerConfiguration;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  /**
+   Override this to add parameters to the request.
+   */
+  protected Map<String, String> prepareParams(StreamsDatum entry) {
+    return new HashMap<>();
+  }
+
+  /**
+   * Prepares the HTTP request (GET or POST, per the configured request method) for the given URI.
+   * @param uri target uri
+   * @return the configured request
+   */
+  public HttpRequestBase prepareHttpRequest(URI uri) {
+    HttpRequestBase request;
+    if ( configuration.getRequestMethod().equals(HttpProviderConfiguration.RequestMethod.GET)) {
+      request = new HttpGet(uri);
+    } else if ( configuration.getRequestMethod().equals(HttpProviderConfiguration.RequestMethod.POST)) {
+      request = new HttpPost(uri);
+    } else {
+      // this shouldn't happen because of the default
+      request = new HttpGet(uri);
+    }
 
-    protected HttpProviderConfiguration configuration;
+    request.addHeader("content-type", this.configuration.getContentType());
 
-    protected volatile Queue<StreamsDatum> providerQueue = new ConcurrentLinkedQueue<>();
+    return request;
 
-    protected final ReadWriteLock lock = new ReentrantReadWriteLock();
+  }
 
-    private ExecutorService executor;
+  @Override
+  public void prepare(Object configurationObject) {
 
-    public SimpleHttpProvider() {
-        this(new ComponentConfigurator<>(HttpProviderConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("http")));
-    }
+    mapper = StreamsJacksonMapper.getInstance();
 
-    public SimpleHttpProvider(HttpProviderConfiguration providerConfiguration) {
-        LOGGER.info("creating SimpleHttpProvider");
-        LOGGER.info(providerConfiguration.toString());
-        this.configuration = providerConfiguration;
-    }
+    uriBuilder = new URIBuilder()
+        .setScheme(this.configuration.getProtocol())
+        .setHost(this.configuration.getHostname())
+        .setPort(this.configuration.getPort().intValue())
+        .setPath(this.configuration.getResourcePath());
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
+    SSLContextBuilder builder = new SSLContextBuilder();
+    SSLConnectionSocketFactory sslsf = null;
+    try {
+      builder.loadTrustMaterial(null, new TrustSelfSignedStrategy());
+      sslsf = new SSLConnectionSocketFactory(
+          builder.build(), SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
+    } catch (NoSuchAlgorithmException | KeyManagementException | KeyStoreException ex) {
+      LOGGER.warn(ex.getMessage());
     }
 
-    /**
-      Override this to add parameters to the request
-     */
-    protected Map<String, String> prepareParams(StreamsDatum entry) {
-        return new HashMap<>();
+    httpclient = HttpClients.custom().setSSLSocketFactory(
+        sslsf).build();
+
+    executor = Executors.newSingleThreadExecutor();
+
+  }
+
+  @Override
+  public void cleanUp() {
+
+    LOGGER.info("shutting down SimpleHttpProvider");
+    this.shutdownAndAwaitTermination(executor);
+    try {
+      httpclient.close();
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } finally {
+      try {
+        httpclient.close();
+      } catch (IOException ex) {
+        ex.printStackTrace();
+      } finally {
+        httpclient = null;
+      }
     }
+  }
 
-    public HttpRequestBase prepareHttpRequest(URI uri) {
-        HttpRequestBase request;
-        if( configuration.getRequestMethod().equals(HttpProviderConfiguration.RequestMethod.GET)) {
-            request = new HttpGet(uri);
-        } else if( configuration.getRequestMethod().equals(HttpProviderConfiguration.RequestMethod.POST)) {
-            request = new HttpPost(uri);
-        } else {
-            // this shouldn't happen because of the default
-            request = new HttpGet(uri);
-        }
-
-        request.addHeader("content-type", this.configuration.getContentType());
+  @Override
+  public void startStream() {
 
-        return request;
+    executor.execute(new Runnable() {
+      @Override
+      public void run() {
 
-    }
+        readCurrent();
 
-    @Override
-    public void prepare(Object configurationObject) {
-
-        mapper = StreamsJacksonMapper.getInstance();
-
-        uriBuilder = new URIBuilder()
-            .setScheme(this.configuration.getProtocol())
-            .setHost(this.configuration.getHostname())
-            .setPort(this.configuration.getPort().intValue())
-            .setPath(this.configuration.getResourcePath());
-
-        SSLContextBuilder builder = new SSLContextBuilder();
-        SSLConnectionSocketFactory sslsf = null;
-        try {
-            builder.loadTrustMaterial(null, new TrustSelfSignedStrategy());
-            sslsf = new SSLConnectionSocketFactory(
-                    builder.build(), SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
-        } catch (NoSuchAlgorithmException | KeyManagementException | KeyStoreException e) {
-            LOGGER.warn(e.getMessage());
-        }
+        Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS);
 
-        httpclient = HttpClients.custom().setSSLSocketFactory(
-                sslsf).build();
+      }
+    });
+  }
 
-        executor = Executors.newSingleThreadExecutor();
+  @Override
+  public StreamsResultSet readCurrent() {
+    StreamsResultSet current;
 
-    }
+    uriBuilder = uriBuilder.setPath(
+        Joiner.on("/").skipNulls().join(uriBuilder.getPath(), configuration.getResource(), configuration.getResourcePostfix())
+    );
 
-    @Override
-    public void cleanUp() {
-
-        LOGGER.info("shutting down SimpleHttpProvider");
-        this.shutdownAndAwaitTermination(executor);
-        try {
-            httpclient.close();
-        } catch (IOException e) {
-            e.printStackTrace();
-        } finally {
-            try {
-                httpclient.close();
-            } catch (IOException e) {
-                e.printStackTrace();
-            } finally {
-                httpclient = null;
-            }
-        }
+    URI uri;
+    try {
+      uri = uriBuilder.build();
+    } catch (URISyntaxException ex) {
+      uri = null;
     }
 
-    @Override
-    public void startStream() {
-
-        executor.execute(new Runnable() {
-            @Override
-            public void run() {
+    List<ObjectNode> results = execute(uri);
 
-                readCurrent();
+    lock.writeLock().lock();
 
-                Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS);
-
-            }
-        });
+    for ( ObjectNode item : results ) {
+      providerQueue.add(newDatum(item));
     }
 
-    @Override
-    public StreamsResultSet readCurrent() {
-        StreamsResultSet current;
-
-        uriBuilder = uriBuilder.setPath(
-                Joiner.on("/").skipNulls().join(uriBuilder.getPath(), configuration.getResource(), configuration.getResourcePostfix())
-        );
+    LOGGER.debug("Creating new result set for {} items", providerQueue.size());
+    current = new StreamsResultSet(providerQueue);
 
-        URI uri;
-        try {
-            uri = uriBuilder.build();
-        } catch (URISyntaxException e) {
-            uri = null;
-        }
-
-        List<ObjectNode> results = execute(uri);
-
-        lock.writeLock().lock();
-
-        for( ObjectNode item : results ) {
-            providerQueue.add(newDatum(item));
-        }
-
-        LOGGER.debug("Creating new result set for {} items", providerQueue.size());
-        current = new StreamsResultSet(providerQueue);
+    return current;
+  }
 
-        return current;
-    }
+  protected List<ObjectNode> execute(URI uri) {
 
-    protected List<ObjectNode> execute(URI uri) {
-
-        Preconditions.checkNotNull(uri);
-
-        List<ObjectNode> results = new ArrayList<>();
-
-        HttpRequestBase httpRequest = prepareHttpRequest(uri);
-
-        CloseableHttpResponse response = null;
-
-        String entityString;
-        try {
-            response = httpclient.execute(httpRequest);
-            HttpEntity entity = response.getEntity();
-            // TODO: handle retry
-            if (response.getStatusLine().getStatusCode() == 200 && entity != null) {
-                entityString = EntityUtils.toString(entity);
-                if( !entityString.equals("{}") && !entityString.equals("[]") ) {
-                    JsonNode jsonNode = mapper.readValue(entityString, JsonNode.class);
-                    results = parse(jsonNode);
-                }
-            }
-        } catch (IOException e) {
-            LOGGER.error("IO error:\n{}\n{}\n{}", uri.toString(), response, e.getMessage());
-        } finally {
-            try {
-                if (response != null) {
-                    response.close();
-                }
-            } catch (IOException ignored) {}
-        }
-        return results;
-    }
+    Preconditions.checkNotNull(uri);
 
-    /**
-     Override this to change how entity gets converted to objects
-     */
-    protected List<ObjectNode> parse(JsonNode jsonNode) {
+    List<ObjectNode> results = new ArrayList<>();
 
-        List<ObjectNode> results = new ArrayList<>();
+    HttpRequestBase httpRequest = prepareHttpRequest(uri);
 
-        if (jsonNode != null && jsonNode instanceof ObjectNode ) {
-            results.add((ObjectNode) jsonNode);
-        } else if (jsonNode != null && jsonNode instanceof ArrayNode) {
-            ArrayNode arrayNode = (ArrayNode) jsonNode;
-            Iterator<JsonNode> iterator = arrayNode.elements();
-            while (iterator.hasNext()) {
-                ObjectNode element = (ObjectNode) iterator.next();
+    CloseableHttpResponse response = null;
 
-                results.add(element);
-            }
+    String entityString;
+    try {
+      response = httpclient.execute(httpRequest);
+      HttpEntity entity = response.getEntity();
+      // TODO: handle retry
+      if (response.getStatusLine().getStatusCode() == 200 && entity != null) {
+        entityString = EntityUtils.toString(entity);
+        if ( !entityString.equals("{}") && !entityString.equals("[]") ) {
+          JsonNode jsonNode = mapper.readValue(entityString, JsonNode.class);
+          results = parse(jsonNode);
         }
-
-        return results;
-    }
-
-    /**
-     Override this to change how metadata is derived from object
-     */
-    protected StreamsDatum newDatum(ObjectNode item) {
-        try {
-            String id = null;
-            if( item.get("id") != null )
-                id = item.get("id").asText();
-            DateTime timestamp = null;
-            if( item.get("timestamp") != null )
-                timestamp = new DateTime(item.get("timestamp").asText());
-            if( id != null && timestamp != null )
-                return new StreamsDatum(item, id, timestamp);
-            else if( id != null )
-                return new StreamsDatum(item, id);
-            else if( timestamp != null )
-                return new StreamsDatum(item, null, timestamp);
-            else return new StreamsDatum(item);
-        } catch( Exception e ) {
-            return new StreamsDatum(item);
+      }
+    } catch (IOException ex) {
+      LOGGER.error("IO error:\n{}\n{}\n{}", uri.toString(), response, ex.getMessage());
+    } finally {
+      try {
+        if (response != null) {
+          response.close();
         }
+      } catch (IOException ignored) {
+        LOGGER.trace("IOException", ignored);
+      }
     }
-
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return null;
-    }
-
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return null;
+    return results;
+  }
+
+  /**
+   Override this to change how entity gets converted to objects.
+   */
+  protected List<ObjectNode> parse(JsonNode jsonNode) {
+
+    List<ObjectNode> results = new ArrayList<>();
+
+    if (jsonNode != null && jsonNode instanceof ObjectNode ) {
+      results.add((ObjectNode) jsonNode);
+    } else if (jsonNode != null && jsonNode instanceof ArrayNode) {
+      ArrayNode arrayNode = (ArrayNode) jsonNode;
+      Iterator<JsonNode> iterator = arrayNode.elements();
+      while (iterator.hasNext()) {
+        ObjectNode element = (ObjectNode) iterator.next();
+
+        results.add(element);
+      }
     }
 
-    @Override
-    public boolean isRunning() {
-        return true;
+    return results;
+  }
+
+  /**
+   Override this to change how metadata is derived from object.
+   */
+  protected StreamsDatum newDatum(ObjectNode item) {
+    try {
+      String id = null;
+      if ( item.get("id") != null ) {
+        id = item.get("id").asText();
+      }
+      DateTime timestamp = null;
+      if ( item.get("timestamp") != null ) {
+        timestamp = new DateTime(item.get("timestamp").asText());
+      }
+      if ( id != null && timestamp != null ) {
+        return new StreamsDatum(item, id, timestamp);
+      } else if ( id != null ) {
+        return new StreamsDatum(item, id);
+      } else if ( timestamp != null ) {
+        return new StreamsDatum(item, null, timestamp);
+      } else {
+        return new StreamsDatum(item);
+      }
+    } catch ( Exception ex ) {
+      return new StreamsDatum(item);
     }
-
-    protected void shutdownAndAwaitTermination(ExecutorService pool) {
-        pool.shutdown(); // Disable new tasks from being submitted
-        try {
-            // Wait a while for existing tasks to terminate
-            if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
-                pool.shutdownNow(); // Cancel currently executing tasks
-                // Wait a while for tasks to respond to being cancelled
-                if (!pool.awaitTermination(10, TimeUnit.SECONDS))
-                    LOGGER.error("Pool did not terminate");
-            }
-        } catch (InterruptedException ie) {
-            // (Re-)Cancel if current thread also interrupted
-            pool.shutdownNow();
-            // Preserve interrupt status
-            Thread.currentThread().interrupt();
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return null;
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return null;
+  }
+
+  @Override
+  public boolean isRunning() {
+    return true;
+  }
+
+  protected void shutdownAndAwaitTermination(ExecutorService pool) {
+    pool.shutdown(); // Disable new tasks from being submitted
+    try {
+      // Wait a while for existing tasks to terminate
+      if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+        pool.shutdownNow(); // Cancel currently executing tasks
+        // Wait a while for tasks to respond to being cancelled
+        if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+          LOGGER.error("Pool did not terminate");
         }
+      }
+    } catch (InterruptedException ie) {
+      // (Re-)Cancel if current thread also interrupted
+      pool.shutdownNow();
+      // Preserve interrupt status
+      Thread.currentThread().interrupt();
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-http/src/test/java/SimpleHTTPPostPersistWriterTest.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-http/src/test/java/SimpleHTTPPostPersistWriterTest.java b/streams-components/streams-http/src/test/java/SimpleHTTPPostPersistWriterTest.java
index 55e338d..2333c4b 100644
--- a/streams-components/streams-http/src/test/java/SimpleHTTPPostPersistWriterTest.java
+++ b/streams-components/streams-http/src/test/java/SimpleHTTPPostPersistWriterTest.java
@@ -16,16 +16,18 @@
  * under the License.
  */
 
+import org.apache.streams.components.http.HttpPersistWriterConfiguration;
+import org.apache.streams.components.http.persist.SimpleHTTPPostPersistWriter;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
+
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpUriRequest;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
-import org.apache.streams.components.http.HttpPersistWriterConfiguration;
-import org.apache.streams.components.http.persist.SimpleHTTPPostPersistWriter;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -47,87 +49,91 @@ import static org.mockito.Matchers.any;
 @PrepareForTest({HttpClients.class, CloseableHttpResponse.class, CloseableHttpResponse.class})
 public class SimpleHTTPPostPersistWriterTest {
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    /**
-     * test port.
-     */
-    private static final int PORT = 18080;
-
-    /**
-     * test hosts.
-     */
-    private static final String HOSTNAME = "localhost";
-
-    /**
-     * test protocol.
-     */
-    private static final String PROTOCOL = "http";
-
-    /**
-     * CloseableHttpClient mock.
-     */
-    private CloseableHttpClient client;
-
-    /**
-     * CloseableHttpClient mock.
-     */
-    private CloseableHttpResponse response = Mockito.mock(CloseableHttpResponse.class);
-
-    /**
-     * Our output.
-     */
-    private ByteArrayOutputStream output;
-
-    /**
-     * Our input.
-     */
-    private ByteArrayInputStream input;
-
-    @Before
-    public void setUp() throws Exception
-    {
-        /*
-      HttpClients mock.
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+  /**
+   * test port.
+   */
+  private static final int PORT = 18080;
+
+  /**
+   * test hosts.
+   */
+  private static final String HOSTNAME = "localhost";
+
+  /**
+   * test protocol.
+   */
+  private static final String PROTOCOL = "http";
+
+  /**
+   * CloseableHttpClient mock.
+   */
+  private CloseableHttpClient client;
+
+  /**
+   * CloseableHttpClient mock.
+   */
+  private CloseableHttpResponse response = Mockito.mock(CloseableHttpResponse.class);
+
+  /**
+   * Our output.
+   */
+  private ByteArrayOutputStream output;
+
+  /**
+   * Our input.
+   */
+  private ByteArrayInputStream input;
+
+  @Before
+  public void setUp() throws Exception
+  {
+    /*
+     HttpClients mock.
      */
-        this.client = PowerMockito.mock(CloseableHttpClient.class);
+    this.client = PowerMockito.mock(CloseableHttpClient.class);
 
-        PowerMockito.mockStatic(HttpClients.class);
+    PowerMockito.mockStatic(HttpClients.class);
 
-        PowerMockito.when(HttpClients.createDefault())
-                .thenReturn(client);
+    PowerMockito.when(HttpClients.createDefault())
+        .thenReturn(client);
 
-        PowerMockito.when(client.execute(any(HttpUriRequest.class)))
-                .thenReturn(response);
+    PowerMockito.when(client.execute(any(HttpUriRequest.class)))
+        .thenReturn(response);
 
-        Mockito.when(response.getEntity()).thenReturn(null);
-        Mockito.doNothing().when(response).close();
+    Mockito.when(response.getEntity()).thenReturn(null);
+    Mockito.doNothing().when(response).close();
 
-    }
+  }
 
-    @Test
-    public void testPersist() throws Exception
-    {
-        HttpPersistWriterConfiguration configuration = new HttpPersistWriterConfiguration();
-        configuration.setProtocol(PROTOCOL);
-        configuration.setHostname(HOSTNAME);
-        configuration.setPort((long) PORT);
-        configuration.setResourcePath("/");
+  /**
+   * Verifies that write() executes an HTTP request and closes the response.
+   * @throws Exception if mock setup or the write under test fails
+   */
+  @Test
+  public void testPersist() throws Exception
+  {
+    HttpPersistWriterConfiguration configuration = new HttpPersistWriterConfiguration();
+    configuration.setProtocol(PROTOCOL);
+    configuration.setHostname(HOSTNAME);
+    configuration.setPort((long) PORT);
+    configuration.setResourcePath("/");
 
-        /*
-      Instance under tests.
+    /*
+     Instance under tests.
      */
-        SimpleHTTPPostPersistWriter writer = new SimpleHTTPPostPersistWriter(configuration);
+    SimpleHTTPPostPersistWriter writer = new SimpleHTTPPostPersistWriter(configuration);
 
-        writer.prepare(null);
+    writer.prepare(null);
 
-        StreamsDatum testDatum = new StreamsDatum(mapper.readValue("{\"message\":\"ping\"}", ObjectNode.class));
+    StreamsDatum testDatum = new StreamsDatum(mapper.readValue("{\"message\":\"ping\"}", ObjectNode.class));
 
-        writer.write(testDatum);
+    writer.write(testDatum);
 
-        Mockito.verify(this.client).execute(any(HttpUriRequest.class));
+    Mockito.verify(this.client).execute(any(HttpUriRequest.class));
 
-        Mockito.verify(this.response).close();
+    Mockito.verify(this.response).close();
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-config/src/main/java/org/apache/streams/config/ComponentConfigurator.java
----------------------------------------------------------------------
diff --git a/streams-config/src/main/java/org/apache/streams/config/ComponentConfigurator.java b/streams-config/src/main/java/org/apache/streams/config/ComponentConfigurator.java
index 42b70a6..5eea60e 100644
--- a/streams-config/src/main/java/org/apache/streams/config/ComponentConfigurator.java
+++ b/streams-config/src/main/java/org/apache/streams/config/ComponentConfigurator.java
@@ -19,9 +19,9 @@
 package org.apache.streams.config;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.reflect.TypeToken;
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigRenderOptions;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,44 +30,64 @@ import java.io.Serializable;
 /**
  * ComponentConfigurator supplies serializable configuration beans derived from a specified typesafe path or object.
  *
+ * <p/>
  * Typically a component will select a 'default' typesafe path to be used if no other path or object is provided.
  *
+ * <p/>
  * For example, streams-persist-elasticsearch will use 'elasticsearch' by default, but an implementation
- *   such as github.com/w2ogroup/elasticsearch-reindex can resolve a reader from elasticsearch.source
- *   and a writer from elasticsearch.destination
+ *   such as github.com/apache/streams-examples/local/elasticsearch-reindex
+ *   can resolve a reader from elasticsearch.source
+ *   and a writer from elasticsearch.destination.
  *
  */
 public class ComponentConfigurator<T extends Serializable> {
 
-    private Class<T> configClass;
-    public ComponentConfigurator(Class<T> configClass) {
-        this.configClass = configClass;
-    }
+  private Class<T> configClass;
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ComponentConfigurator.class);
+  public ComponentConfigurator(Class<T> configClass) {
+    this.configClass = configClass;
+  }
 
-    private final static ObjectMapper mapper = new ObjectMapper();
+  private static final Logger LOGGER = LoggerFactory.getLogger(ComponentConfigurator.class);
 
-    public T detectConfiguration(Config typesafeConfig) {
+  private static final ObjectMapper mapper = new ObjectMapper();
 
-        T pojoConfig = null;
+  /**
+   * resolve a serializable configuration pojo from a given typesafe config object.
+   * @param typesafeConfig typesafeConfig
+   * @return result
+   */
+  public T detectConfiguration(Config typesafeConfig) {
 
-        try {
-            pojoConfig = mapper.readValue(typesafeConfig.root().render(ConfigRenderOptions.concise()), configClass);
-        } catch (Exception e) {
-            e.printStackTrace();
-            LOGGER.warn("Could not parse:", typesafeConfig);
-        }
+    T pojoConfig = null;
 
-        return pojoConfig;
+    try {
+      pojoConfig = mapper.readValue(typesafeConfig.root().render(ConfigRenderOptions.concise()), configClass);
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      LOGGER.warn("Could not parse:", typesafeConfig);
     }
 
-    public T detectConfiguration(String subConfig) {
-        Config streamsConfig = StreamsConfigurator.getConfig();
-        return detectConfiguration( streamsConfig.getConfig(subConfig));
-    }
+    return pojoConfig;
+  }
 
-    public T detectConfiguration(Config typesafeConfig, String subConfig) {
-        return detectConfiguration( typesafeConfig.getConfig(subConfig));
-    }
+  /**
+   * resolve a serializable configuration pojo from a portion of the JVM config object.
+   * @param subConfig subConfig
+   * @return result
+   */
+  public T detectConfiguration(String subConfig) {
+    Config streamsConfig = StreamsConfigurator.getConfig();
+    return detectConfiguration( streamsConfig.getConfig(subConfig));
+  }
+
+  /**
+   * resolve a serializable configuration pojo from a portion of a given typesafe config object.
+   * @param typesafeConfig typesafeConfig
+   * @param subConfig subConfig
+   * @return result
+   */
+  public T detectConfiguration(Config typesafeConfig, String subConfig) {
+    return detectConfiguration( typesafeConfig.getConfig(subConfig));
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-config/src/main/java/org/apache/streams/config/StreamsConfigurator.java
----------------------------------------------------------------------
diff --git a/streams-config/src/main/java/org/apache/streams/config/StreamsConfigurator.java b/streams-config/src/main/java/org/apache/streams/config/StreamsConfigurator.java
index 6a8fb1d..319b32a 100644
--- a/streams-config/src/main/java/org/apache/streams/config/StreamsConfigurator.java
+++ b/streams-config/src/main/java/org/apache/streams/config/StreamsConfigurator.java
@@ -22,6 +22,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 import com.typesafe.config.ConfigRenderOptions;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -36,60 +37,60 @@ import java.net.URL;
  */
 public class StreamsConfigurator {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ComponentConfigurator.class);
-
-    private final static ObjectMapper mapper = new ObjectMapper();
+  private static final Logger LOGGER = LoggerFactory.getLogger(ComponentConfigurator.class);
 
-    /*
-        Pull all configuration files from the classpath, system properties, and environment variables
-     */
-    public static Config config = ConfigFactory.load();
+  private static final ObjectMapper mapper = new ObjectMapper();
 
-    public static Config getConfig() {
-        return config;
-    }
+  /*
+      Pull all configuration files from the classpath, system properties, and environment variables
+   */
+  public static Config config = ConfigFactory.load();
 
-    public static Config resolveConfig(String configUrl) throws MalformedURLException {
-        URL url = new URL(configUrl);
-        Config urlConfig = ConfigFactory.parseURL(url);
-        urlConfig.resolve();
-        config = urlConfig;
-        return config;
-    }
+  public static Config getConfig() {
+    return config;
+  }
 
+  public static Config resolveConfig(String configUrl) throws MalformedURLException {
+    URL url = new URL(configUrl);
+    Config urlConfig = ConfigFactory.parseURL(url);
+    urlConfig.resolve();
+    config = urlConfig;
+    return config;
+  }
 
 
-    public static StreamsConfiguration detectConfiguration() {
-        return detectConfiguration(config);
-    }
 
-    public static StreamsConfiguration detectConfiguration(Config typesafeConfig) {
+  public static StreamsConfiguration detectConfiguration() {
+    return detectConfiguration(config);
+  }
 
-        StreamsConfiguration pojoConfig = null;
+  public static StreamsConfiguration detectConfiguration(Config typesafeConfig) {
 
-        try {
-            pojoConfig = mapper.readValue(typesafeConfig.root().render(ConfigRenderOptions.concise()), StreamsConfiguration.class);
-        } catch (Exception e) {
-            e.printStackTrace();
-            LOGGER.warn("Could not parse:", typesafeConfig);
-        }
+    StreamsConfiguration pojoConfig = null;
 
-        return pojoConfig;
+    try {
+      pojoConfig = mapper.readValue(typesafeConfig.root().render(ConfigRenderOptions.concise()), StreamsConfiguration.class);
+    } catch (Exception e) {
+      e.printStackTrace();
+      LOGGER.warn("Could not parse:", typesafeConfig);
     }
 
-    public static StreamsConfiguration mergeConfigurations(Config base, Config delta) {
+    return pojoConfig;
+  }
 
-        Config merged = delta.withFallback(base);
+  public static StreamsConfiguration mergeConfigurations(Config base, Config delta) {
 
-        StreamsConfiguration pojoConfig = null;
+    Config merged = delta.withFallback(base);
 
-        try {
-            pojoConfig = mapper.readValue(merged.root().render(ConfigRenderOptions.concise()), StreamsConfiguration.class);
-        } catch (Exception e) {
-            e.printStackTrace();
-            LOGGER.warn("Failed to merge.");
-        }
+    StreamsConfiguration pojoConfig = null;
 
-        return pojoConfig;
+    try {
+      pojoConfig = mapper.readValue(merged.root().render(ConfigRenderOptions.concise()), StreamsConfiguration.class);
+    } catch (Exception e) {
+      e.printStackTrace();
+      LOGGER.warn("Failed to merge.");
     }
+
+    return pojoConfig;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-config/src/test/java/org/apache/streams/config/test/ComponentConfiguratorTest.java
----------------------------------------------------------------------
diff --git a/streams-config/src/test/java/org/apache/streams/config/test/ComponentConfiguratorTest.java b/streams-config/src/test/java/org/apache/streams/config/test/ComponentConfiguratorTest.java
index eddfb53..82cc6bc 100644
--- a/streams-config/src/test/java/org/apache/streams/config/test/ComponentConfiguratorTest.java
+++ b/streams-config/src/test/java/org/apache/streams/config/test/ComponentConfiguratorTest.java
@@ -40,7 +40,7 @@ import org.powermock.modules.junit4.PowerMockRunner;
 @PrepareForTest(StreamsConfigurator.class)
 public class ComponentConfiguratorTest {
 
-    private final static ObjectMapper mapper = new ObjectMapper();
+    private static final ObjectMapper mapper = new ObjectMapper();
 
     @Test
     public void testDetectDefaults() throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-config/src/test/java/org/apache/streams/config/test/StreamsConfiguratorTest.java
----------------------------------------------------------------------
diff --git a/streams-config/src/test/java/org/apache/streams/config/test/StreamsConfiguratorTest.java b/streams-config/src/test/java/org/apache/streams/config/test/StreamsConfiguratorTest.java
index 65dbd75..a29d8c7 100644
--- a/streams-config/src/test/java/org/apache/streams/config/test/StreamsConfiguratorTest.java
+++ b/streams-config/src/test/java/org/apache/streams/config/test/StreamsConfiguratorTest.java
@@ -44,7 +44,7 @@ import java.util.Scanner;
  */
 public class StreamsConfiguratorTest {
 
-    private final static ObjectMapper mapper = new ObjectMapper();
+    private static final ObjectMapper mapper = new ObjectMapper();
 
     @Test
     public void testDetectConfiguration() throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistReader.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistReader.java b/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistReader.java
index fc00321..e3bfe70 100644
--- a/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistReader.java
+++ b/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistReader.java
@@ -18,6 +18,12 @@
 
 package org.apache.streams.amazon.kinesis;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsPersistReader;
+import org.apache.streams.core.StreamsResultSet;
+
 import com.amazonaws.ClientConfiguration;
 import com.amazonaws.Protocol;
 import com.amazonaws.auth.AWSCredentials;
@@ -27,150 +33,150 @@ import com.amazonaws.regions.Regions;
 import com.amazonaws.services.kinesis.AmazonKinesisClient;
 import com.amazonaws.services.kinesis.model.DescribeStreamResult;
 import com.amazonaws.services.kinesis.model.Shard;
-import com.amazonaws.services.s3.AmazonS3Client;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Strings;
 import com.google.common.collect.Queues;
 import com.typesafe.config.Config;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.DatumStatusCounter;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsPersistReader;
-import org.apache.streams.core.StreamsResultSet;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.streams.amazon.kinesis.KinesisConfiguration;
-
 import java.io.Serializable;
 import java.math.BigInteger;
 import java.util.List;
-import java.util.Properties;
 import java.util.Queue;
 import java.util.concurrent.ConcurrentLinkedQueue;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
+/**
+ * KinesisPersistReader reads documents from kinesis.
+ */
 public class KinesisPersistReader implements StreamsPersistReader, Serializable {
 
-    public final static String STREAMS_ID = "KinesisPersistReader";
+  public static final String STREAMS_ID = "KinesisPersistReader";
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(KinesisPersistReader.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(KinesisPersistReader.class);
 
-    protected volatile Queue<StreamsDatum> persistQueue;
+  protected volatile Queue<StreamsDatum> persistQueue;
 
-    private ObjectMapper mapper = new ObjectMapper();
+  private ObjectMapper mapper = new ObjectMapper();
 
-    private KinesisReaderConfiguration config;
+  private KinesisReaderConfiguration config;
 
-    protected Long pollInterval = StreamsConfigurator.detectConfiguration().getBatchFrequencyMs();
+  protected Long pollInterval = StreamsConfigurator.detectConfiguration().getBatchFrequencyMs();
 
-    private List<String> streamNames;
+  private List<String> streamNames;
 
-    private ExecutorService executor;
+  private ExecutorService executor;
 
-    protected AmazonKinesisClient client;
+  protected AmazonKinesisClient client;
 
-    public KinesisPersistReader() {
-        Config config = StreamsConfigurator.config.getConfig("kinesis");
-        this.config = new ComponentConfigurator<>(KinesisReaderConfiguration.class).detectConfiguration(config);
-        this.persistQueue  = new ConcurrentLinkedQueue<StreamsDatum>();
-    }
+  /**
+   * KinesisPersistReader constructor - resolves KinesisReaderConfiguration from JVM 'kinesis'.
+   */
+  public KinesisPersistReader() {
+    Config config = StreamsConfigurator.config.getConfig("kinesis");
+    this.config = new ComponentConfigurator<>(KinesisReaderConfiguration.class).detectConfiguration(config);
+    this.persistQueue  = new ConcurrentLinkedQueue<StreamsDatum>();
+  }
 
-    public KinesisPersistReader(KinesisReaderConfiguration config) {
-        this.config = config;
-        this.persistQueue  = new ConcurrentLinkedQueue<StreamsDatum>();
-    }
+  /**
+   * KinesisPersistReader constructor - uses provided KinesisReaderConfiguration.
+   */
+  public KinesisPersistReader(KinesisReaderConfiguration config) {
+    this.config = config;
+    this.persistQueue  = new ConcurrentLinkedQueue<StreamsDatum>();
+  }
 
-    public void setConfig(KinesisReaderConfiguration config) {
-        this.config = config;
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public void setConfig(KinesisReaderConfiguration config) {
+    this.config = config;
+  }
 
-    @Override
-    public void startStream() {
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        this.streamNames = this.config.getStreams();
+  @Override
+  public void startStream() {
 
-        for (final String stream : streamNames) {
+    this.streamNames = this.config.getStreams();
 
-            DescribeStreamResult describeStreamResult = client.describeStream(stream);
+    for (final String stream : streamNames) {
 
-            if( "ACTIVE".equals(describeStreamResult.getStreamDescription().getStreamStatus())) {
+      DescribeStreamResult describeStreamResult = client.describeStream(stream);
 
-                List<Shard> shardList = describeStreamResult.getStreamDescription().getShards();
+      if( "ACTIVE".equals(describeStreamResult.getStreamDescription().getStreamStatus())) {
 
-                for( Shard shard : shardList ) {
-                    executor.submit(new KinesisPersistReaderTask(this, stream, shard.getShardId()));
-                }
-            }
+        List<Shard> shardList = describeStreamResult.getStreamDescription().getShards();
 
+        for( Shard shard : shardList ) {
+          executor.submit(new KinesisPersistReaderTask(this, stream, shard.getShardId()));
         }
+      }
 
     }
 
-    @Override
-    public StreamsResultSet readAll() {
-        return readCurrent();
-    }
+  }
 
-    public StreamsResultSet readCurrent() {
+  @Override
+  public StreamsResultSet readAll() {
+    return readCurrent();
+  }
 
-        StreamsResultSet current;
-        synchronized( KinesisPersistReader.class ) {
-            current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(persistQueue));
-            persistQueue.clear();
-        }
-        return current;
-    }
+  public StreamsResultSet readCurrent() {
 
-    @Override
-    public StreamsResultSet readNew(BigInteger bigInteger) {
-        return null;
+    StreamsResultSet current;
+    synchronized( KinesisPersistReader.class ) {
+      current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(persistQueue));
+      persistQueue.clear();
     }
-
-    @Override
-    public StreamsResultSet readRange(DateTime dateTime, DateTime dateTime2) {
-        return null;
+    return current;
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger bigInteger) {
+    return null;
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime dateTime, DateTime dateTime2) {
+    return null;
+  }
+
+  @Override
+  public boolean isRunning() {
+    return !executor.isShutdown() && !executor.isTerminated();
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    // Connect to Kinesis
+    synchronized (this) {
+      // Create the credentials Object
+      AWSCredentials credentials = new BasicAWSCredentials(config.getKey(), config.getSecretKey());
+
+      ClientConfiguration clientConfig = new ClientConfiguration();
+      clientConfig.setProtocol(Protocol.valueOf(config.getProtocol().toString()));
+
+      this.client = new AmazonKinesisClient(credentials, clientConfig);
+      if (!Strings.isNullOrEmpty(config.getRegion()))
+        this.client.setRegion(Region.getRegion(Regions.fromName(config.getRegion())));
     }
+    streamNames = this.config.getStreams();
+    executor = Executors.newFixedThreadPool(streamNames.size());
+  }
 
-    @Override
-    public boolean isRunning() {
-        return !executor.isShutdown() && !executor.isTerminated();
-    }
+  @Override
+  public void cleanUp() {
 
-    @Override
-    public void prepare(Object configurationObject) {
-        // Connect to Kinesis
-        synchronized (this) {
-            // Create the credentials Object
-            AWSCredentials credentials = new BasicAWSCredentials(config.getKey(), config.getSecretKey());
-
-            ClientConfiguration clientConfig = new ClientConfiguration();
-            clientConfig.setProtocol(Protocol.valueOf(config.getProtocol().toString()));
-
-            this.client = new AmazonKinesisClient(credentials, clientConfig);
-            if (!Strings.isNullOrEmpty(config.getRegion()))
-                this.client.setRegion(Region.getRegion(Regions.fromName(config.getRegion())));
-        }
-        streamNames = this.config.getStreams();
-        executor = Executors.newFixedThreadPool(streamNames.size());
-    }
-
-    @Override
-    public void cleanUp() {
-
-        while( !executor.isTerminated()) {
-            try {
-                executor.awaitTermination(5, TimeUnit.SECONDS);
-            } catch (InterruptedException e) {}
-        }
+    while( !executor.isTerminated()) {
+      try {
+        executor.awaitTermination(5, TimeUnit.SECONDS);
+      } catch (InterruptedException e) {}
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistReaderTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistReaderTask.java b/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistReaderTask.java
index 7753031..a93fda8 100644
--- a/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistReaderTask.java
+++ b/streams-contrib/streams-amazon-aws/streams-persist-kinesis/src/main/java/org/apache/streams/amazon/kinesis/KinesisPersistReaderTask.java
@@ -18,94 +18,102 @@
 
 package org.apache.streams.amazon.kinesis;
 
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamsDatum;
+
 import com.amazonaws.services.kinesis.model.GetRecordsRequest;
 import com.amazonaws.services.kinesis.model.GetRecordsResult;
 import com.amazonaws.services.kinesis.model.GetShardIteratorRequest;
 import com.amazonaws.services.kinesis.model.GetShardIteratorResult;
 import com.amazonaws.services.kinesis.model.Record;
-import com.amazonaws.util.Base64;
 import com.google.common.collect.Maps;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.StreamsDatum;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.math.BigInteger;
 import java.nio.charset.Charset;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Random;
 
+/**
+ * KinesisPersistReaderTask reads documents from kinesis on behalf of
+ * @see {@link KinesisPersistReader}.
+ */
 public class KinesisPersistReaderTask implements Runnable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(KinesisPersistReaderTask.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(KinesisPersistReaderTask.class);
 
-    private KinesisPersistReader reader;
-    private String streamName;
-    private String shardId;
+  private KinesisPersistReader reader;
+  private String streamName;
+  private String shardId;
 
-    private String shardIteratorId;
+  private String shardIteratorId;
 
-    private Long pollInterval = StreamsConfigurator.detectConfiguration().getBatchFrequencyMs();
+  private Long pollInterval = StreamsConfigurator.detectConfiguration().getBatchFrequencyMs();
 
-    public KinesisPersistReaderTask(KinesisPersistReader reader, String streamName, String shardId) {
-        this.reader = reader;
-        this.streamName = streamName;
-        this.shardId = shardId;
-    }
+  /**
+   * KinesisPersistReaderTask constructor.
+   */
+  public KinesisPersistReaderTask(KinesisPersistReader reader, String streamName, String shardId) {
+    this.reader = reader;
+    this.streamName = streamName;
+    this.shardId = shardId;
+  }
 
-    @Override
-    public void run() {
+  @Override
+  public void run() {
 
-        GetShardIteratorRequest shardIteratorRequest = new GetShardIteratorRequest()
-                .withStreamName(this.streamName)
-                .withShardId(shardId)
-                .withShardIteratorType("TRIM_HORIZON");
+    GetShardIteratorRequest shardIteratorRequest = new GetShardIteratorRequest()
+        .withStreamName(this.streamName)
+        .withShardId(shardId)
+        .withShardIteratorType("TRIM_HORIZON");
 
-        GetShardIteratorResult shardIteratorResult = reader.client.getShardIterator(shardIteratorRequest);
+    GetShardIteratorResult shardIteratorResult = reader.client.getShardIterator(shardIteratorRequest);
 
-        shardIteratorId = shardIteratorResult.getShardIterator();
+    shardIteratorId = shardIteratorResult.getShardIterator();
 
-        Map<String,Object> metadata = Maps.newHashMap();
-        metadata.put("streamName", streamName);
-        metadata.put("shardId", shardId);
+    Map<String,Object> metadata = Maps.newHashMap();
+    metadata.put("streamName", streamName);
+    metadata.put("shardId", shardId);
 
-        while(true) {
+    while (true) {
 
-            GetRecordsRequest recordsRequest = new GetRecordsRequest()
-                    .withShardIterator(shardIteratorId);
+      GetRecordsRequest recordsRequest = new GetRecordsRequest()
+          .withShardIterator(shardIteratorId);
 
-            GetRecordsResult recordsResult = reader.client.getRecords(recordsRequest);
+      GetRecordsResult recordsResult = reader.client.getRecords(recordsRequest);
 
-            LOGGER.info("{} records {} millis behind {}:{}:{} ", recordsResult.getRecords().size(), recordsResult.getMillisBehindLatest(), streamName, shardId, shardIteratorId);
+      LOGGER.info("{} records {} millis behind {}:{}:{} ", recordsResult.getRecords().size(), recordsResult.getMillisBehindLatest(), streamName, shardId, shardIteratorId);
 
-            shardIteratorId = recordsResult.getNextShardIterator();
+      shardIteratorId = recordsResult.getNextShardIterator();
 
-            List<Record> recordList = recordsResult.getRecords();
+      List<Record> recordList = recordsResult.getRecords();
 
-            for (Record record : recordList) {
-                try {
-                    byte[] byteArray = record.getData().array();
-                    //byte[] decoded = Base64.decode(byteArray);
-                    String message = new String(byteArray, Charset.forName("UTF-8"));
-                    reader.persistQueue.add(
-                            new StreamsDatum(
-                                    message,
-                                    record.getPartitionKey(),
-                                    new DateTime(),
-                                    new BigInteger(record.getSequenceNumber()),
-                                    metadata));
-                } catch( Exception e ) {
-                    LOGGER.warn("Exception processing record {}: {}", record, e);
-                }
-            }
-            try {
-                Thread.sleep(reader.pollInterval);
-            } catch (InterruptedException e) {}
+      for (Record record : recordList) {
+        try {
+          byte[] byteArray = record.getData().array();
+          //byte[] decoded = Base64.decode(byteArray);
+          String message = new String(byteArray, Charset.forName("UTF-8"));
+          reader.persistQueue.add(
+              new StreamsDatum(
+                  message,
+                  record.getPartitionKey(),
+                  new DateTime(),
+                  new BigInteger(record.getSequenceNumber()),
+                  metadata));
+        } catch ( Exception ex ) {
+          LOGGER.warn("Exception processing record {}: {}", record, ex);
         }
-
+      }
+      try {
+        Thread.sleep(reader.pollInterval);
+      } catch (InterruptedException ex) {
+        LOGGER.trace("InterruptedException", ex);
+      }
     }
 
+  }
+
 }


[19/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterTimelineProviderTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterTimelineProviderTask.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterTimelineProviderTask.java
index 111d213..dbf6ac9 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterTimelineProviderTask.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterTimelineProviderTask.java
@@ -18,12 +18,13 @@
 
 package org.apache.streams.twitter.provider;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.twitter.converter.TwitterDateTimeFormat;
 import org.apache.streams.util.ComponentUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Lists;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import twitter4j.Paging;
@@ -39,77 +40,79 @@ import java.util.List;
  */
 public class TwitterTimelineProviderTask implements Runnable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(TwitterTimelineProviderTask.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterTimelineProviderTask.class);
 
-    private static ObjectMapper MAPPER = new StreamsJacksonMapper(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
+  private static ObjectMapper MAPPER = new StreamsJacksonMapper(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
 
-    protected TwitterTimelineProvider provider;
-    protected Twitter client;
-    protected Long id;
+  protected TwitterTimelineProvider provider;
+  protected Twitter client;
+  protected Long id;
 
-    public TwitterTimelineProviderTask(TwitterTimelineProvider provider, Twitter twitter, Long id) {
-        this.provider = provider;
-        this.client = twitter;
-        this.id = id;
-    }
+  /**
+   * TwitterTimelineProviderTask constructor.
+   * @param provider TwitterTimelineProvider
+   * @param twitter Twitter
+   * @param id Long
+   */
+  public TwitterTimelineProviderTask(TwitterTimelineProvider provider, Twitter twitter, Long id) {
+    this.provider = provider;
+    this.client = twitter;
+    this.id = id;
+  }
+
+  @Override
+  public void run() {
+
+    Paging paging = new Paging(1, 200);
+    List<Status> statuses = null;
+    int count = 0;
+
+    LOGGER.info(id + " Thread Starting");
+
+    do {
+      int keepTrying = 0;
 
-    @Override
-    public void run() {
-
-        Paging paging = new Paging(1, 200);
-        List<Status> statuses = null;
-        int count = 0;
-
-        LOGGER.info(id + " Thread Starting");
-
-        do
-        {
-            int keepTrying = 0;
-
-            // keep trying to load, give it 5 attempts.
-            //This value was chosen because it seemed like a reasonable number of times
-            //to retry capturing a timeline given the sorts of errors that could potentially
-            //occur (network timeout/interruption, faulty client, etc.)
-            while (keepTrying < 5)
-            {
-
-                try
-                {
-                    this.client = provider.getTwitterClient();
-
-                    statuses = client.getUserTimeline(id, paging);
-
-                    for (Status tStat : statuses) {
-
-                        String json = TwitterObjectFactory.getRawJSON(tStat);
-                        if( count < provider.getConfig().getMaxItems() ) {
-                            try {
-                                org.apache.streams.twitter.pojo.Tweet tweet = MAPPER.readValue(json, org.apache.streams.twitter.pojo.Tweet.class);
-                                ComponentUtils.offerUntilSuccess(new StreamsDatum(tweet), provider.providerQueue);
-                            } catch(Exception exception) {
-                                LOGGER.warn("Failed to read document as Tweet ", tStat);
-                            }
-                            count++;
-                        }
-
-                    }
-
-                    paging.setPage(paging.getPage() + 1);
-
-                    keepTrying = 10;
-                }
-                catch(TwitterException twitterException) {
-                    keepTrying += TwitterErrorHandler.handleTwitterError(client, id, twitterException);
-                }
-                catch(Exception e) {
-                    keepTrying += TwitterErrorHandler.handleTwitterError(client, id, e);
-                }
+      // keep trying to load, give it 5 attempts.
+      //This value was chosen because it seemed like a reasonable number of times
+      //to retry capturing a timeline given the sorts of errors that could potentially
+      //occur (network timeout/interruption, faulty client, etc.)
+      while (keepTrying < 5) {
+
+        try {
+          this.client = provider.getTwitterClient();
+
+          statuses = client.getUserTimeline(id, paging);
+
+          for (Status twitterStatus : statuses) {
+
+            String json = TwitterObjectFactory.getRawJSON(twitterStatus);
+
+            if ( count < provider.getConfig().getMaxItems() ) {
+              try {
+                org.apache.streams.twitter.pojo.Tweet tweet = MAPPER.readValue(json, org.apache.streams.twitter.pojo.Tweet.class);
+                ComponentUtils.offerUntilSuccess(new StreamsDatum(tweet), provider.providerQueue);
+              } catch (Exception exception) {
+                LOGGER.warn("Failed to read document as Tweet ", twitterStatus);
+              }
+              count++;
             }
-        }
-        while (provider.shouldContinuePulling(statuses) && count < provider.getConfig().getMaxItems());
 
-        LOGGER.info(id + " Thread Finished");
+          }
+
+          paging.setPage(paging.getPage() + 1);
 
+          keepTrying = 10;
+        } catch (TwitterException twitterException) {
+          keepTrying += TwitterErrorHandler.handleTwitterError(client, id, twitterException);
+        } catch (Exception ex) {
+          keepTrying += TwitterErrorHandler.handleTwitterError(client, id, ex);
+        }
+      }
     }
+    while (provider.shouldContinuePulling(statuses) && count < provider.getConfig().getMaxItems());
+
+    LOGGER.info(id + " Thread Finished");
+
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterUserInformationProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterUserInformationProvider.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterUserInformationProvider.java
index 15ff791..3210f80 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterUserInformationProvider.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterUserInformationProvider.java
@@ -18,18 +18,6 @@
 
 package org.apache.streams.twitter.provider;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.google.common.util.concurrent.Uninterruptibles;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigParseOptions;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.config.StreamsConfigurator;
@@ -43,6 +31,18 @@ import org.apache.streams.twitter.TwitterUserInformationConfiguration;
 import org.apache.streams.twitter.converter.TwitterDateTimeFormat;
 import org.apache.streams.twitter.pojo.User;
 import org.apache.streams.util.ComponentUtils;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
+import com.google.common.util.concurrent.Uninterruptibles;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+import org.apache.commons.lang.NotImplementedException;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -73,359 +73,394 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import static java.util.concurrent.Executors.newSingleThreadExecutor;
 
-public class TwitterUserInformationProvider implements StreamsProvider, Serializable
-{
+/**
+ * Retrieve current profile status from a list of user ids or names.
+ */
+public class TwitterUserInformationProvider implements StreamsProvider, Serializable {
+
+  public static final String STREAMS_ID = "TwitterUserInformationProvider";
+
+  private static ObjectMapper MAPPER = new StreamsJacksonMapper(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterUserInformationProvider.class);
+
+  public static final int MAX_NUMBER_WAITING = 1000;
+
+  private TwitterUserInformationConfiguration config;
+
+  /**
+   * To use from command line:
+   *
+   * <p/>
+   * Supply (at least) the following required configuration in application.conf:
+   *
+   * <p/>
+   * twitter.oauth.consumerKey
+   * twitter.oauth.consumerSecret
+   * twitter.oauth.accessToken
+   * twitter.oauth.accessTokenSecret
+   * twitter.info
+   *
+   * <p/>
+   * Launch using:
+   *
+   * <p/>
+   * mvn exec:java -Dexec.mainClass=org.apache.streams.twitter.provider.TwitterUserInformationProvider -Dexec.args="application.conf tweets.json"
+   *
+   * @param args args
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    TwitterUserInformationConfiguration config = new ComponentConfigurator<>(TwitterUserInformationConfiguration.class).detectConfiguration(typesafe, "twitter");
+    TwitterUserInformationProvider provider = new TwitterUserInformationProvider(config);
+
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
+      while (iterator.hasNext()) {
+        StreamsDatum datum = iterator.next();
+        String json;
+        try {
+          json = MAPPER.writeValueAsString(datum.getDocument());
+          outStream.println(json);
+        } catch (JsonProcessingException ex) {
+          System.err.println(ex.getMessage());
+        }
+      }
+    }
+    while ( provider.isRunning());
+    provider.cleanUp();
+    outStream.flush();
+  }
+
+  protected final ReadWriteLock lock = new ReentrantReadWriteLock();
 
-    public static final String STREAMS_ID = "TwitterUserInformationProvider";
+  protected volatile Queue<StreamsDatum> providerQueue;
 
-    private static ObjectMapper MAPPER = new StreamsJacksonMapper(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
+  public TwitterUserInformationConfiguration getConfig() {
+    return config;
+  }
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(TwitterUserInformationProvider.class);
+  public void setConfig(TwitterUserInformationConfiguration config) {
+    this.config = config;
+  }
 
-    public static final int MAX_NUMBER_WAITING = 1000;
+  protected Iterator<Long[]> idsBatches;
+  protected Iterator<String[]> screenNameBatches;
 
-    private TwitterUserInformationConfiguration config;
+  protected ListeningExecutorService executor;
 
-    public static void main(String[] args) throws Exception {
+  protected DateTime start;
+  protected DateTime end;
 
-        Preconditions.checkArgument(args.length >= 2);
+  protected final AtomicBoolean running = new AtomicBoolean();
 
-        String configfile = args[0];
-        String outfile = args[1];
+  // TODO: this should be abstracted out
+  public static ExecutorService newFixedThreadPoolWithQueueSize(int numThreads, int queueSize) {
+    return new ThreadPoolExecutor(numThreads, numThreads,
+        5000L, TimeUnit.MILLISECONDS,
+        new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
+  }
 
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config testResourceConfig = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+  /**
+   * TwitterUserInformationProvider constructor.
+   * Resolves config from JVM properties 'twitter'.
+   */
+  public TwitterUserInformationProvider() {
+    this.config = new ComponentConfigurator<>(TwitterUserInformationConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig().getConfig("twitter"));
+  }
 
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+  public TwitterUserInformationProvider(TwitterUserInformationConfiguration config) {
+    this.config = config;
+  }
 
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        TwitterUserInformationConfiguration config = new ComponentConfigurator<>(TwitterUserInformationConfiguration.class).detectConfiguration(typesafe, "twitter");
-        TwitterUserInformationProvider provider = new TwitterUserInformationProvider(config);
+  public Queue<StreamsDatum> getProviderQueue() {
+    return this.providerQueue;
+  }
 
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
-            while(iterator.hasNext()) {
-                StreamsDatum datum = iterator.next();
-                String json;
-                try {
-                    json = MAPPER.writeValueAsString(datum.getDocument());
-                    outStream.println(json);
-                } catch (JsonProcessingException e) {
-                    System.err.println(e.getMessage());
-                }
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+
+    if ( configurationObject instanceof TwitterFollowingConfiguration ) {
+      config = (TwitterUserInformationConfiguration) configurationObject;
     }
 
-    protected final ReadWriteLock lock = new ReentrantReadWriteLock();
+    Preconditions.checkNotNull(config);
+    Preconditions.checkNotNull(config.getOauth());
+    Preconditions.checkNotNull(config.getOauth().getConsumerKey());
+    Preconditions.checkNotNull(config.getOauth().getConsumerSecret());
+    Preconditions.checkNotNull(config.getOauth().getAccessToken());
+    Preconditions.checkNotNull(config.getOauth().getAccessTokenSecret());
+    Preconditions.checkNotNull(config.getInfo());
+
+    try {
+      lock.writeLock().lock();
+      providerQueue = constructQueue();
+    } finally {
+      lock.writeLock().unlock();
+    }
 
-    protected volatile Queue<StreamsDatum> providerQueue;
+    Preconditions.checkNotNull(providerQueue);
 
-    public TwitterUserInformationConfiguration getConfig()              { return config; }
+    List<String> screenNames = new ArrayList<String>();
+    List<String[]> screenNameBatches = new ArrayList<String[]>();
 
-    public void setConfig(TwitterUserInformationConfiguration config)   { this.config = config; }
+    List<Long> ids = new ArrayList<Long>();
+    List<Long[]> idsBatches = new ArrayList<Long[]>();
 
-    protected Iterator<Long[]> idsBatches;
-    protected Iterator<String[]> screenNameBatches;
+    for (String s : config.getInfo()) {
+      if (s != null) {
+        String potentialScreenName = s.replaceAll("@", "").trim().toLowerCase();
 
-    protected ListeningExecutorService executor;
+        // See if it is a long, if it is, add it to the user iD list, if it is not, add it to the
+        // screen name list
+        try {
+          ids.add(Long.parseLong(potentialScreenName));
+        } catch (Exception ex) {
+          screenNames.add(potentialScreenName);
+        }
 
-    protected DateTime start;
-    protected DateTime end;
+        // Twitter allows for batches up to 100 per request, but you cannot mix types
 
-    protected final AtomicBoolean running = new AtomicBoolean();
+        if (ids.size() >= 100) {
+          // add the batch
+          idsBatches.add(ids.toArray(new Long[ids.size()]));
+          // reset the Ids
+          ids = new ArrayList<Long>();
+        }
 
-    public static ExecutorService newFixedThreadPoolWithQueueSize(int nThreads, int queueSize) {
-        return new ThreadPoolExecutor(nThreads, nThreads,
-                5000L, TimeUnit.MILLISECONDS,
-                new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
+        if (screenNames.size() >= 100) {
+          // add the batch (size the array from screenNames, not ids:
+          // a too-large array would be padded with trailing nulls by toArray)
+          screenNameBatches.add(screenNames.toArray(new String[screenNames.size()]));
+          // reset the screen names
+          screenNames = new ArrayList<String>();
+        }
+      }
     }
 
-    public TwitterUserInformationProvider() {
-        this.config = new ComponentConfigurator<>(TwitterUserInformationConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig().getConfig("twitter"));
-    }
 
-    public TwitterUserInformationProvider(TwitterUserInformationConfiguration config) {
-        this.config = config;
+    if (ids.size() > 0) {
+      idsBatches.add(ids.toArray(new Long[ids.size()]));
     }
 
-    public Queue<StreamsDatum> getProviderQueue() {
-        return this.providerQueue;
+    if (screenNames.size() > 0) {
+      screenNameBatches.add(screenNames.toArray(new String[screenNames.size()]));
+    }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
+    if (ids.size() + screenNames.size() > 0) {
+      executor = MoreExecutors.listeningDecorator(newFixedThreadPoolWithQueueSize(5, (ids.size() + screenNames.size())));
+    } else {
+      executor = MoreExecutors.listeningDecorator(newSingleThreadExecutor());
     }
 
-    @Override
-    public void prepare(Object o) {
-
-        if( o instanceof TwitterFollowingConfiguration )
-            config = (TwitterUserInformationConfiguration) o;
-
-        Preconditions.checkNotNull(config);
-        Preconditions.checkNotNull(config.getOauth());
-        Preconditions.checkNotNull(config.getOauth().getConsumerKey());
-        Preconditions.checkNotNull(config.getOauth().getConsumerSecret());
-        Preconditions.checkNotNull(config.getOauth().getAccessToken());
-        Preconditions.checkNotNull(config.getOauth().getAccessTokenSecret());
-        Preconditions.checkNotNull(config.getInfo());
-
-        try {
-            lock.writeLock().lock();
-            providerQueue = constructQueue();
-        } finally {
-            lock.writeLock().unlock();
-        }
-
-        Preconditions.checkNotNull(providerQueue);
-
-        List<String> screenNames = new ArrayList<String>();
-        List<String[]> screenNameBatches = new ArrayList<String[]>();
-
-        List<Long> ids = new ArrayList<Long>();
-        List<Long[]> idsBatches = new ArrayList<Long[]>();
-
-        for(String s : config.getInfo()) {
-            if(s != null)
-            {
-                String potentialScreenName = s.replaceAll("@", "").trim().toLowerCase();
-
-                // See if it is a long, if it is, add it to the user iD list, if it is not, add it to the
-                // screen name list
-                try {
-                    ids.add(Long.parseLong(potentialScreenName));
-                } catch (Exception e) {
-                    screenNames.add(potentialScreenName);
-                }
-
-                // Twitter allows for batches up to 100 per request, but you cannot mix types
-
-                if(ids.size() >= 100) {
-                    // add the batch
-                    idsBatches.add(ids.toArray(new Long[ids.size()]));
-                    // reset the Ids
-                    ids = new ArrayList<Long>();
-                }
-
-                if(screenNames.size() >= 100) {
-                    // add the batch
-                    screenNameBatches.add(screenNames.toArray(new String[ids.size()]));
-                    // reset the Ids
-                    screenNames = new ArrayList<String>();
-                }
-            }
-        }
+    Preconditions.checkNotNull(executor);
 
+    this.idsBatches = idsBatches.iterator();
+    this.screenNameBatches = screenNameBatches.iterator();
+  }
 
-        if(ids.size() > 0)
-            idsBatches.add(ids.toArray(new Long[ids.size()]));
+  @Override
+  public void startStream() {
 
-        if(screenNames.size() > 0)
-            screenNameBatches.add(screenNames.toArray(new String[ids.size()]));
+    Preconditions.checkNotNull(executor);
 
-        if(ids.size() + screenNames.size() > 0)
-            executor = MoreExecutors.listeningDecorator(newFixedThreadPoolWithQueueSize(5, (ids.size() + screenNames.size())));
-        else
-            executor = MoreExecutors.listeningDecorator(newSingleThreadExecutor());
+    Preconditions.checkArgument(idsBatches.hasNext() || screenNameBatches.hasNext());
 
-        Preconditions.checkNotNull(executor);
+    LOGGER.info("{}{} - startStream", idsBatches, screenNameBatches);
 
-        this.idsBatches = idsBatches.iterator();
-        this.screenNameBatches = screenNameBatches.iterator();
+    while (idsBatches.hasNext()) {
+      loadBatch(idsBatches.next());
     }
 
-    @Override
-    public void startStream() {
-
-        Preconditions.checkNotNull(executor);
-
-        Preconditions.checkArgument(idsBatches.hasNext() || screenNameBatches.hasNext());
+    while (screenNameBatches.hasNext()) {
+      loadBatch(screenNameBatches.next());
+    }
 
-        LOGGER.info("{}{} - startStream", idsBatches, screenNameBatches);
+    running.set(true);
 
-        while(idsBatches.hasNext())
-            loadBatch(idsBatches.next());
+    executor.shutdown();
+  }
 
-        while(screenNameBatches.hasNext())
-            loadBatch(screenNameBatches.next());
+  protected void loadBatch(Long[] ids) {
+    Twitter client = getTwitterClient();
+    int keepTrying = 0;
 
-        running.set(true);
+    // keep trying to load, give it 5 attempts.
+    //while (keepTrying < 10)
+    while (keepTrying < 1) {
+      try {
+        long[] toQuery = new long[ids.length];
 
-        executor.shutdown();
-    }
+        for (int i = 0; i < ids.length; i++) {
+          toQuery[i] = ids[i];
+        }
 
-    protected void loadBatch(Long[] ids) {
-        Twitter client = getTwitterClient();
-        int keepTrying = 0;
-
-        // keep trying to load, give it 5 attempts.
-        //while (keepTrying < 10)
-        while (keepTrying < 1)
-        {
-            try
-            {
-                long[] toQuery = new long[ids.length];
-                for(int i = 0; i < ids.length; i++)
-                    toQuery[i] = ids[i];
-
-                for (twitter4j.User tUser : client.lookupUsers(toQuery)) {
-                    String json = DataObjectFactory.getRawJSON(tUser);
-                    try {
-                        User user = MAPPER.readValue(json, org.apache.streams.twitter.pojo.User.class);
-                        ComponentUtils.offerUntilSuccess(new StreamsDatum(user), providerQueue);
-                    } catch(Exception exception) {
-                        LOGGER.warn("Failed to read document as User ", tUser);
-                    }
-                }
-                keepTrying = 10;
-            }
-            catch(TwitterException twitterException) {
-                keepTrying += TwitterErrorHandler.handleTwitterError(client, twitterException);
-            }
-            catch(Exception e) {
-                keepTrying += TwitterErrorHandler.handleTwitterError(client, e);
-            }
+        for (twitter4j.User twitterUser : client.lookupUsers(toQuery)) {
+          String json = DataObjectFactory.getRawJSON(twitterUser);
+          try {
+            User user = MAPPER.readValue(json, org.apache.streams.twitter.pojo.User.class);
+            ComponentUtils.offerUntilSuccess(new StreamsDatum(user), providerQueue);
+          } catch (Exception exception) {
+            LOGGER.warn("Failed to read document as User {}", twitterUser);
+          }
         }
+        keepTrying = 10;
+      } catch (TwitterException twitterException) {
+        keepTrying += TwitterErrorHandler.handleTwitterError(client, twitterException);
+      } catch (Exception ex) {
+        keepTrying += TwitterErrorHandler.handleTwitterError(client, ex);
+      }
     }
-
-    protected void loadBatch(String[] ids) {
-        Twitter client = getTwitterClient();
-        int keepTrying = 0;
-
-        // keep trying to load, give it 5 attempts.
-        //while (keepTrying < 10)
-        while (keepTrying < 1)
-        {
-            try
-            {
-                for (twitter4j.User tUser : client.lookupUsers(ids)) {
-                    String json = DataObjectFactory.getRawJSON(tUser);
-                    try {
-                        User user = MAPPER.readValue(json, org.apache.streams.twitter.pojo.User.class);
-                        ComponentUtils.offerUntilSuccess(new StreamsDatum(user), providerQueue);
-                    } catch(Exception exception) {
-                        LOGGER.warn("Failed to read document as User ", tUser);
-                    }
-                }
-                keepTrying = 10;
-            }
-            catch(TwitterException twitterException) {
-                keepTrying += TwitterErrorHandler.handleTwitterError(client, twitterException);
-            }
-            catch(Exception e) {
-                keepTrying += TwitterErrorHandler.handleTwitterError(client, e);
-            }
+  }
+
+  protected void loadBatch(String[] ids) {
+    Twitter client = getTwitterClient();
+    int keepTrying = 0;
+
+    // keep trying to load, give it 5 attempts.
+    //while (keepTrying < 10)
+    while (keepTrying < 1) {
+      try {
+        for (twitter4j.User twitterUser : client.lookupUsers(ids)) {
+          String json = DataObjectFactory.getRawJSON(twitterUser);
+          try {
+            User user = MAPPER.readValue(json, org.apache.streams.twitter.pojo.User.class);
+            ComponentUtils.offerUntilSuccess(new StreamsDatum(user), providerQueue);
+          } catch (Exception exception) {
+            LOGGER.warn("Failed to read document as User {}", twitterUser);
+          }
         }
+        keepTrying = 10;
+      } catch (TwitterException twitterException) {
+        keepTrying += TwitterErrorHandler.handleTwitterError(client, twitterException);
+      } catch (Exception ex) {
+        keepTrying += TwitterErrorHandler.handleTwitterError(client, ex);
+      }
     }
+  }
 
-    public StreamsResultSet readCurrent() {
-
-        LOGGER.debug("{}{} - readCurrent", idsBatches, screenNameBatches);
+  @Override
+  public StreamsResultSet readCurrent() {
 
-        StreamsResultSet result;
-
-        try {
-            lock.writeLock().lock();
-            result = new StreamsResultSet(providerQueue);
-            result.setCounter(new DatumStatusCounter());
-            providerQueue = constructQueue();
-            LOGGER.debug("{}{} - providing {} docs", idsBatches, screenNameBatches, result.size());
-        } finally {
-            lock.writeLock().unlock();
-        }
+    LOGGER.debug("{}{} - readCurrent", idsBatches, screenNameBatches);
 
-        return result;
+    StreamsResultSet result;
 
+    try {
+      lock.writeLock().lock();
+      result = new StreamsResultSet(providerQueue);
+      result.setCounter(new DatumStatusCounter());
+      providerQueue = constructQueue();
+      LOGGER.debug("{}{} - providing {} docs", idsBatches, screenNameBatches, result.size());
+    } finally {
+      lock.writeLock().unlock();
     }
 
-    protected Queue<StreamsDatum> constructQueue() {
-        return new LinkedBlockingQueue<StreamsDatum>();
-    }
+    return result;
 
-    public StreamsResultSet readNew(BigInteger sequence) {
-        LOGGER.debug("{} readNew", STREAMS_ID);
-        throw new NotImplementedException();
-    }
+  }
 
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        LOGGER.debug("{} readRange", STREAMS_ID);
-        this.start = start;
-        this.end = end;
-        readCurrent();
-        StreamsResultSet result = (StreamsResultSet)providerQueue.iterator();
-        return result;
-    }
+  protected Queue<StreamsDatum> constructQueue() {
+    return new LinkedBlockingQueue<StreamsDatum>();
+  }
 
-    @Override
-    public boolean isRunning() {
+  public StreamsResultSet readNew(BigInteger sequence) {
+    LOGGER.debug("{} readNew", STREAMS_ID);
+    throw new NotImplementedException();
+  }
 
-        if( providerQueue.isEmpty() && executor.isTerminated() ) {
-            LOGGER.info("{}{} - completed", idsBatches, screenNameBatches);
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    LOGGER.debug("{} readRange", STREAMS_ID);
+    this.start = start;
+    this.end = end;
+    // capture readCurrent()'s result: casting Queue.iterator() to StreamsResultSet always throws ClassCastException
+    StreamsResultSet result = readCurrent();
+    return result;
+  }
 
-            running.set(false);
+  @Override
+  public boolean isRunning() {
 
-            LOGGER.info("Exiting");
-        }
+    if ( providerQueue.isEmpty() && executor.isTerminated() ) {
+      LOGGER.info("{}{} - completed", idsBatches, screenNameBatches);
+
+      running.set(false);
 
-        return running.get();
+      LOGGER.info("Exiting");
     }
 
-    void shutdownAndAwaitTermination(ExecutorService pool) {
-        pool.shutdown(); // Disable new tasks from being submitted
-        try {
-            // Wait a while for existing tasks to terminate
-            if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
-                pool.shutdownNow(); // Cancel currently executing tasks
-                // Wait a while for tasks to respond to being cancelled
-                if (!pool.awaitTermination(10, TimeUnit.SECONDS))
-                    System.err.println("Pool did not terminate");
-            }
-        } catch (InterruptedException ie) {
-            // (Re-)Cancel if current thread also interrupted
-            pool.shutdownNow();
-            // Preserve interrupt status
-            Thread.currentThread().interrupt();
+    return running.get();
+  }
+
+  void shutdownAndAwaitTermination(ExecutorService pool) {
+    pool.shutdown(); // Disable new tasks from being submitted
+    try {
+      // Wait a while for existing tasks to terminate
+      if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+        pool.shutdownNow(); // Cancel currently executing tasks
+        // Wait a while for tasks to respond to being cancelled
+        if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+          System.err.println("Pool did not terminate");
         }
+      }
+    } catch (InterruptedException ie) {
+      // (Re-)Cancel if current thread also interrupted
+      pool.shutdownNow();
+      // Preserve interrupt status
+      Thread.currentThread().interrupt();
     }
+  }
 
 
+  // TODO: abstract out, also appears in TwitterTimelineProvider
+  protected Twitter getTwitterClient() {
+    String baseUrl = TwitterProviderUtil.baseUrl(config);
 
-    protected Twitter getTwitterClient()
-    {
-        String baseUrl = TwitterProviderUtil.baseUrl(config);
+    ConfigurationBuilder builder = new ConfigurationBuilder()
+        .setOAuthConsumerKey(config.getOauth().getConsumerKey())
+        .setOAuthConsumerSecret(config.getOauth().getConsumerSecret())
+        .setOAuthAccessToken(config.getOauth().getAccessToken())
+        .setOAuthAccessTokenSecret(config.getOauth().getAccessTokenSecret())
+        .setIncludeEntitiesEnabled(true)
+        .setJSONStoreEnabled(true)
+        .setAsyncNumThreads(3)
+        .setRestBaseURL(baseUrl)
+        .setIncludeMyRetweetEnabled(Boolean.TRUE)
+        .setPrettyDebugEnabled(Boolean.TRUE);
 
-        ConfigurationBuilder builder = new ConfigurationBuilder()
-                .setOAuthConsumerKey(config.getOauth().getConsumerKey())
-                .setOAuthConsumerSecret(config.getOauth().getConsumerSecret())
-                .setOAuthAccessToken(config.getOauth().getAccessToken())
-                .setOAuthAccessTokenSecret(config.getOauth().getAccessTokenSecret())
-                .setIncludeEntitiesEnabled(true)
-                .setJSONStoreEnabled(true)
-                .setAsyncNumThreads(3)
-                .setRestBaseURL(baseUrl)
-                .setIncludeMyRetweetEnabled(Boolean.TRUE)
-                .setPrettyDebugEnabled(Boolean.TRUE);
+    return new TwitterFactory(builder.build()).getInstance();
+  }
 
-        return new TwitterFactory(builder.build()).getInstance();
-    }
+  protected void callback() {
 
-    protected void callback() {
 
+  }
 
-    }
-
-    @Override
-    public void cleanUp() {
-        shutdownAndAwaitTermination(executor);
-    }
+  @Override
+  public void cleanUp() {
+    shutdownAndAwaitTermination(executor);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/data/TwitterObjectMapperIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/data/TwitterObjectMapperIT.java b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/data/TwitterObjectMapperIT.java
index 42f0fba..a480fd1 100644
--- a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/data/TwitterObjectMapperIT.java
+++ b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/data/TwitterObjectMapperIT.java
@@ -18,25 +18,27 @@
 
 package org.apache.streams.twitter.test.data;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.twitter.converter.TwitterDocumentClassifier;
+import org.apache.streams.twitter.pojo.Delete;
+import org.apache.streams.twitter.pojo.Retweet;
+import org.apache.streams.twitter.pojo.Tweet;
+
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.base.Optional;
 import com.google.common.collect.Lists;
-import java.io.BufferedReader;
-import java.io.InputStream;
-import java.io.InputStreamReader;
 import org.apache.commons.lang.StringUtils;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.twitter.converter.TwitterDocumentClassifier;
-import org.apache.streams.twitter.pojo.Delete;
-import org.apache.streams.twitter.pojo.Retweet;
-import org.apache.streams.twitter.pojo.Tweet;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.BufferedReader;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
 import static org.apache.streams.twitter.converter.TwitterDateTimeFormat.TWITTER_FORMAT;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.not;
@@ -45,87 +47,87 @@ import static org.hamcrest.Matchers.greaterThan;
 import static org.junit.Assert.assertThat;
 
 /**
-* Tests serialization / deserialization of twitter jsons
-*/
+ * Tests serialization / deserialization of twitter jsons.
+ */
 public class TwitterObjectMapperIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(TwitterObjectMapperIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterObjectMapperIT.class);
+
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(TWITTER_FORMAT));
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(TWITTER_FORMAT));
+  @Test
+  public void tests() {
 
-    @Test
-    public void Tests()
-    {
-        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
 
-        InputStream is = TwitterObjectMapperIT.class.getResourceAsStream("/testtweets.txt");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
+    InputStream is = TwitterObjectMapperIT.class.getResourceAsStream("/testtweets.txt");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
 
-        int tweetlinks = 0;
-        int retweetlinks = 0;
+    int tweetlinks = 0;
+    int retweetlinks = 0;
 
-        try {
-            while (br.ready()) {
-                String line = br.readLine();
-                if(!StringUtils.isEmpty(line))
-                {
-                    LOGGER.info("raw: {}", line);
+    try {
+      while (br.ready()) {
+        String line = br.readLine();
+        if (!StringUtils.isEmpty(line)) {
 
-                    Class detected = new TwitterDocumentClassifier().detectClasses(line).get(0);
+          LOGGER.info("raw: {}", line);
 
-                    ObjectNode event = (ObjectNode) mapper.readTree(line);
+          Class detected = new TwitterDocumentClassifier().detectClasses(line).get(0);
 
-                    assertThat(event, is(not(nullValue())));
+          ObjectNode event = (ObjectNode) mapper.readTree(line);
 
-                    if( detected == Tweet.class ) {
+          assertThat(event, is(not(nullValue())));
 
-                        Tweet tweet = mapper.convertValue(event, Tweet.class);
+          if ( detected == Tweet.class ) {
 
-                        assertThat(tweet, is(not(nullValue())));
-                        assertThat(tweet.getCreatedAt(), is(not(nullValue())));
-                        assertThat(tweet.getText(), is(not(nullValue())));
-                        assertThat(tweet.getUser(), is(not(nullValue())));
+            Tweet tweet = mapper.convertValue(event, Tweet.class);
 
-                        tweetlinks += Optional.fromNullable(tweet.getEntities().getUrls().size()).or(0);
+            assertThat(tweet, is(not(nullValue())));
+            assertThat(tweet.getCreatedAt(), is(not(nullValue())));
+            assertThat(tweet.getText(), is(not(nullValue())));
+            assertThat(tweet.getUser(), is(not(nullValue())));
 
-                    } else if( detected == Retweet.class ) {
+            tweetlinks += Optional.fromNullable(tweet.getEntities().getUrls().size()).or(0);
 
-                        Retweet retweet = mapper.convertValue(event, Retweet.class);
+          } else if ( detected == Retweet.class ) {
 
-                        assertThat(retweet.getRetweetedStatus(), is(not(nullValue())));
-                        assertThat(retweet.getRetweetedStatus().getCreatedAt(), is(not(nullValue())));
-                        assertThat(retweet.getRetweetedStatus().getText(), is(not(nullValue())));
-                        assertThat(retweet.getRetweetedStatus().getUser(), is(not(nullValue())));
-                        assertThat(retweet.getRetweetedStatus().getUser().getId(), is(not(nullValue())));
-                        assertThat(retweet.getRetweetedStatus().getUser().getCreatedAt(), is(not(nullValue())));
+            Retweet retweet = mapper.convertValue(event, Retweet.class);
 
-                        retweetlinks += Optional.fromNullable(retweet.getRetweetedStatus().getEntities().getUrls().size()).or(0);
+            assertThat(retweet.getRetweetedStatus(), is(not(nullValue())));
+            assertThat(retweet.getRetweetedStatus().getCreatedAt(), is(not(nullValue())));
+            assertThat(retweet.getRetweetedStatus().getText(), is(not(nullValue())));
+            assertThat(retweet.getRetweetedStatus().getUser(), is(not(nullValue())));
+            assertThat(retweet.getRetweetedStatus().getUser().getId(), is(not(nullValue())));
+            assertThat(retweet.getRetweetedStatus().getUser().getCreatedAt(), is(not(nullValue())));
 
-                    } else if( detected == Delete.class ) {
+            retweetlinks += Optional.fromNullable(retweet.getRetweetedStatus().getEntities().getUrls().size()).or(0);
 
-                        Delete delete = mapper.convertValue(event, Delete.class);
+          } else if ( detected == Delete.class ) {
 
-                        assertThat(delete.getDelete(), is(not(nullValue())));
-                        assertThat(delete.getDelete().getStatus(), is(not(nullValue())));
-                        assertThat(delete.getDelete().getStatus().getId(), is(not(nullValue())));
-                        assertThat(delete.getDelete().getStatus().getUserId(), is(not(nullValue())));
+            Delete delete = mapper.convertValue(event, Delete.class);
 
-                    } else {
-                        Assert.fail();
-                    }
+            assertThat(delete.getDelete(), is(not(nullValue())));
+            assertThat(delete.getDelete().getStatus(), is(not(nullValue())));
+            assertThat(delete.getDelete().getStatus().getId(), is(not(nullValue())));
+            assertThat(delete.getDelete().getStatus().getUserId(), is(not(nullValue())));
 
-                }
-            }
-        } catch( Exception e ) {
-            LOGGER.error("Exception: ", e);
+          } else {
             Assert.fail();
+          }
+
         }
+      }
+    } catch ( Exception ex ) {
+      LOGGER.error("Exception: ", ex);
+      Assert.fail();
+    }
 
-        assertThat(tweetlinks, is(greaterThan(0)));
-        assertThat(retweetlinks, is(greaterThan(0)));
+    assertThat(tweetlinks, is(greaterThan(0)));
+    assertThat(retweetlinks, is(greaterThan(0)));
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterFollowingProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterFollowingProviderIT.java b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterFollowingProviderIT.java
index 3d7a6d2..720f6ec 100644
--- a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterFollowingProviderIT.java
+++ b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterFollowingProviderIT.java
@@ -19,6 +19,7 @@
 package org.apache.streams.twitter.test.providers;
 
 import org.apache.streams.twitter.provider.TwitterFollowingProvider;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -29,39 +30,39 @@ import java.io.LineNumberReader;
 
 public class TwitterFollowingProviderIT {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(TwitterFollowingProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterFollowingProviderIT.class);
 
-    @Test
-    public void testTwitterFollowingProvider() throws Exception {
+  @Test
+  public void testTwitterFollowingProvider() throws Exception {
 
-        String configfile = "./target/test-classes/TwitterFollowingProviderIT.conf";
-        String outfile = "./target/test-classes/TwitterFollowingProviderIT.stdout.txt";
+    String configfile = "./target/test-classes/TwitterFollowingProviderIT.conf";
+    String outfile = "./target/test-classes/TwitterFollowingProviderIT.stdout.txt";
 
-        String[] args = new String[2];
-        args[0] = configfile;
-        args[1] = outfile;
+    String[] args = new String[2];
+    args[0] = configfile;
+    args[1] = outfile;
 
-        Thread testThread = new Thread((Runnable) () -> {
-            try {
-                TwitterFollowingProvider.main(args);
-            } catch( Exception e ) {
-                LOGGER.error("Test Exception!", e);
-            }
-        });
-        testThread.start();
-        testThread.join(60000);
+    Thread testThread = new Thread((Runnable) () -> {
+      try {
+        TwitterFollowingProvider.main(args);
+      } catch ( Exception ex ) {
+        LOGGER.error("Test Exception!", ex);
+      }
+    });
+    testThread.start();
+    testThread.join(60000);
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while (outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() == 10000);
+    assert (outCounter.getLineNumber() == 10000);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterStreamProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterStreamProviderIT.java b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterStreamProviderIT.java
index c553bf3..12279b9 100644
--- a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterStreamProviderIT.java
+++ b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterStreamProviderIT.java
@@ -19,6 +19,7 @@
 package org.apache.streams.twitter.test.providers;
 
 import org.apache.streams.twitter.provider.TwitterStreamProvider;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -29,39 +30,39 @@ import java.io.LineNumberReader;
 
 public class TwitterStreamProviderIT {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(TwitterStreamProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterStreamProviderIT.class);
 
-    final String outfile = "./target/test-classes/TwitterStreamProviderIT.stdout.txt";
-    final String configfile = "./target/test-classes/TwitterStreamProviderIT.conf";
+  final String outfile = "./target/test-classes/TwitterStreamProviderIT.stdout.txt";
+  final String configfile = "./target/test-classes/TwitterStreamProviderIT.conf";
 
-    @Test
-    public void testTwitterStreamProvider() throws Exception {
+  @Test
+  public void testTwitterStreamProvider() throws Exception {
 
-        String[] args = new String[2];
-        args[0] = configfile;
-        args[1] = outfile;
+    String[] args = new String[2];
+    args[0] = configfile;
+    args[1] = outfile;
 
-        Thread testThread = new Thread((Runnable) () -> {
-            try {
-                TwitterStreamProvider.main(args);
-            } catch( Exception e ) {
-                LOGGER.error("Test Exception!", e);
-            }
-        });
-        testThread.start();
-        testThread.join(60000);
+    Thread testThread = new Thread((Runnable) () -> {
+      try {
+        TwitterStreamProvider.main(args);
+      } catch ( Exception ex ) {
+        LOGGER.error("Test Exception!", ex);
+      }
+    });
+    testThread.start();
+    testThread.join(60000);
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while (outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() > 25);
+    assert (outCounter.getLineNumber() > 25);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterTimelineProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterTimelineProviderIT.java b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterTimelineProviderIT.java
index dadfb54..6bb7f20 100644
--- a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterTimelineProviderIT.java
+++ b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterTimelineProviderIT.java
@@ -19,6 +19,7 @@
 package org.apache.streams.twitter.test.providers;
 
 import org.apache.streams.twitter.provider.TwitterTimelineProvider;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -29,39 +30,39 @@ import java.io.LineNumberReader;
 
 public class TwitterTimelineProviderIT {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(TwitterTimelineProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterTimelineProviderIT.class);
 
-    @Test
-    public void testTwitterTimelineProvider() throws Exception {
+  @Test
+  public void testTwitterTimelineProvider() throws Exception {
 
-        String configfile = "./target/test-classes/TwitterTimelineProviderIT.conf";
-        String outfile = "./target/test-classes/TwitterTimelineProviderIT.stdout.txt";
+    String configfile = "./target/test-classes/TwitterTimelineProviderIT.conf";
+    String outfile = "./target/test-classes/TwitterTimelineProviderIT.stdout.txt";
 
-        String[] args = new String[2];
-        args[0] = configfile;
-        args[1] = outfile;
+    String[] args = new String[2];
+    args[0] = configfile;
+    args[1] = outfile;
 
-        Thread testThread = new Thread((Runnable) () -> {
-            try {
-                TwitterTimelineProvider.main(args);
-            } catch( Exception e ) {
-                LOGGER.error("Test Exception!", e);
-            }
-        });
-        testThread.start();
-        testThread.join(60000);
+    Thread testThread = new Thread((Runnable) () -> {
+      try {
+        TwitterTimelineProvider.main(args);
+      } catch ( Exception ex ) {
+        LOGGER.error("Test Exception!", ex);
+      }
+    });
+    testThread.start();
+    testThread.join(60000);
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while (outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() == 1000);
+    assert (outCounter.getLineNumber() == 1000);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterUserInformationProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterUserInformationProviderIT.java b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterUserInformationProviderIT.java
index f3ed958..bba6c20 100644
--- a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterUserInformationProviderIT.java
+++ b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/providers/TwitterUserInformationProviderIT.java
@@ -19,6 +19,7 @@
 package org.apache.streams.twitter.test.providers;
 
 import org.apache.streams.twitter.provider.TwitterUserInformationProvider;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -29,39 +30,39 @@ import java.io.LineNumberReader;
 
 public class TwitterUserInformationProviderIT {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(TwitterUserInformationProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterUserInformationProviderIT.class);
 
-    @Test
-    public void testTwitterUserInformationProvider() throws Exception {
+  @Test
+  public void testTwitterUserInformationProvider() throws Exception {
 
-        String configfile = "./target/test-classes/TwitterUserInformationProviderIT.conf";
-        String outfile = "./target/test-classes/TwitterUserInformationProviderIT.stdout.txt";
+    String configfile = "./target/test-classes/TwitterUserInformationProviderIT.conf";
+    String outfile = "./target/test-classes/TwitterUserInformationProviderIT.stdout.txt";
 
-        String[] args = new String[2];
-        args[0] = configfile;
-        args[1] = outfile;
+    String[] args = new String[2];
+    args[0] = configfile;
+    args[1] = outfile;
 
-        Thread testThread = new Thread((Runnable) () -> {
-            try {
-                TwitterUserInformationProvider.main(args);
-            } catch( Exception e ) {
-                LOGGER.error("Test Exception!", e);
-            }
-        });
-        testThread.start();
-        testThread.join(60000);
+    Thread testThread = new Thread((Runnable) () -> {
+      try {
+        TwitterUserInformationProvider.main(args);
+      } catch ( Exception ex ) {
+        LOGGER.error("Test Exception!", ex);
+      }
+    });
+    testThread.start();
+    testThread.join(60000);
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while (outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() > 750);
+    assert (outCounter.getLineNumber() > 750);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterActivityConvertersTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterActivityConvertersTest.java b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterActivityConvertersTest.java
index 51f6294..24d646b 100644
--- a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterActivityConvertersTest.java
+++ b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterActivityConvertersTest.java
@@ -18,8 +18,6 @@
 
 package org.apache.streams.twitter.test.utils;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
 import org.apache.streams.converter.ActivityConverterUtil;
 import org.apache.streams.data.util.ActivityUtil;
 import org.apache.streams.jackson.StreamsJacksonMapper;
@@ -29,6 +27,9 @@ import org.apache.streams.twitter.pojo.Delete;
 import org.apache.streams.twitter.pojo.Follow;
 import org.apache.streams.twitter.pojo.Retweet;
 import org.apache.streams.twitter.pojo.Tweet;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Lists;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -41,90 +42,98 @@ import java.util.List;
  */
 public class TwitterActivityConvertersTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(TwitterActivityConvertersTest.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterActivityConvertersTest.class);
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
 
-    private ActivityConverterUtil activityConverterUtil = ActivityConverterUtil.getInstance();
+  private ActivityConverterUtil activityConverterUtil = ActivityConverterUtil.getInstance();
 
-    private String tweetJson = "{\"created_at\":\"Wed Dec 11 22:27:34 +0000 2013\",\"id\":12345,\"id_str\":\"12345\",\"text\":\"text\",\"source\":\"source\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":91407775,\"id_str\":\"12345\",\"name\":\"name\",\"screen_name\":\"screen_name\",\"location\":\"\",\"url\":null,\"description\":null,\"protected\":false,\"followers_count\":136,\"friends_count\":0,\"listed_count\":1,\"created_at\":\"Fri Nov 20 19:29:02 +0000 2009\",\"favourites_count\":0,\"utc_offset\":null,\"time_zone\":null,\"geo_enabled\":false,\"verified\":false,\"statuses_count\":1793,\"lang\":\"en\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"C0DEED\",\"profile_background_image_url\":\"http:\\/\\/profile_background_image_url.png\",\"profile_background_image_url_https\":\"https:\\/\\/profi
 le_background_image_url_https.png\",\"profile_background_tile\":false,\"profile_image_url\":\"http:\\/\\/profile_image_url.jpg\",\"profile_image_url_https\":\"https:\\/\\/profile_image_url_https.jpg\",\"profile_link_color\":\"0084B4\",\"profile_sidebar_border_color\":\"C0DEED\",\"profile_sidebar_fill_color\":\"DDEEF6\",\"profile_text_color\":\"333333\",\"profile_use_background_image\":true,\"default_profile\":true,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweet_count\":0,\"favorite_count\":0,\"entities\":{\"hashtags\":[],\"symbols\":[],\"urls\":[{\"url\":\"http:\\/\\/url\",\"expanded_url\":\"http:\\/\\/expanded_url\",\"display_url\":\"display_url\",\"indices\":[118,140]}],\"user_mentions\":[]},\"favorited\":false,\"retweeted\":false,\"possibly_sensitive\":false,\"filter_level\":\"medium\",\"lang\":\"en\"}\n";
-    private String retweetJson = "{\"created_at\":\"Wed Dec 11 22:27:34 +0000 2013\",\"id\":23456,\"id_str\":\"23456\",\"text\":\"text\",\"source\":\"web\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":163149656,\"id_str\":\"34567\",\"name\":\"name\",\"screen_name\":\"screen_name\",\"location\":\"location\",\"url\":\"http:\\/\\/www.youtube.com\\/watch?v=url\",\"description\":\"description\\u00ed\",\"protected\":false,\"followers_count\":41,\"friends_count\":75,\"listed_count\":2,\"created_at\":\"Mon Jul 05 17:35:49 +0000 2010\",\"favourites_count\":4697,\"utc_offset\":-10800,\"time_zone\":\"Buenos Aires\",\"geo_enabled\":false,\"verified\":false,\"statuses_count\":5257,\"lang\":\"es\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"C4A64B\",\"profile_background_image_url\":\"http:\\/\\/a0.twimg.c
 om\\/profile_background_images\\/12345\\/12345.jpeg\",\"profile_background_image_url_https\":\"https:\\/\\/si0.twimg.com\\/profile_background_images\\/12345\\/12345.jpeg\",\"profile_background_tile\":true,\"profile_image_url\":\"http:\\/\\/pbs.twimg.com\\/profile_images\\/12345\\/12345.jpeg\",\"profile_image_url_https\":\"https:\\/\\/pbs.twimg.com\\/profile_images\\/12345\\/12345.jpeg\",\"profile_banner_url\":\"https:\\/\\/pbs.twimg.com\\/profile_banners\\/12345\\/12345\",\"profile_link_color\":\"BF415A\",\"profile_sidebar_border_color\":\"000000\",\"profile_sidebar_fill_color\":\"B17CED\",\"profile_text_color\":\"3D1957\",\"profile_use_background_image\":true,\"default_profile\":false,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweeted_status\":{\"created_at\":\"Wed Dec 11 22:25:06 +0000 2013\",\"id\":34567,\"id_str\":\"34567\",\"text\":\"text\",\"s
 ource\":\"source\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":34567,\"id_str\":\"34567\",\"name\":\"name\",\"screen_name\":\"screen_name\",\"location\":\"\",\"url\":\"http:\\/\\/www.web.com\",\"description\":\"description\",\"protected\":false,\"followers_count\":34307,\"friends_count\":325,\"listed_count\":361,\"created_at\":\"Fri Apr 13 19:00:11 +0000 2012\",\"favourites_count\":44956,\"utc_offset\":3600,\"time_zone\":\"Madrid\",\"geo_enabled\":false,\"verified\":false,\"statuses_count\":24011,\"lang\":\"es\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"000000\",\"profile_background_image_url\":\"http:\\/\\/profile_background_image_url.jpeg\",\"profile_background_image_url_https\":\"https:\\/\\/si0.twimg.com\\/profile_background_images\\/34567\\/34567.jpeg\",\"profile_background_tile\":fa
 lse,\"profile_image_url\":\"http:\\/\\/pbs.twimg.com\\/profile_images\\/34567\\/34567.gif\",\"profile_image_url_https\":\"https:\\/\\/pbs.twimg.com\\/profile_images\\/34567\\/34567.gif\",\"profile_banner_url\":\"https:\\/\\/pbs.twimg.com\\/profile_banners\\/34567\\/34567\",\"profile_link_color\":\"FF00E1\",\"profile_sidebar_border_color\":\"FFFFFF\",\"profile_sidebar_fill_color\":\"F3F3F3\",\"profile_text_color\":\"333333\",\"profile_use_background_image\":true,\"default_profile\":false,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweet_count\":9,\"favorite_count\":6,\"entities\":{\"hashtags\":[],\"symbols\":[],\"urls\":[],\"user_mentions\":[]},\"favorited\":false,\"retweeted\":false,\"lang\":\"es\"},\"retweet_count\":0,\"favorite_count\":0,\"entities\":{\"hashtags\":[],\"symbols\":[],\"urls\":[],\"user_mentions\":[{\"screen_name\":\"screen_name\",\"n
 ame\":\"name emocional\",\"id\":45678,\"id_str\":\"45678\",\"indices\":[3,14]}]},\"favorited\":false,\"retweeted\":false,\"filter_level\":\"medium\",\"lang\":\"es\"}\n";
+  private String tweetJson = "{\"created_at\":\"Wed Dec 11 22:27:34 +0000 2013\",\"id\":12345,\"id_str\":\"12345\",\"text\":\"text\",\"source\":\"source\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":91407775,\"id_str\":\"12345\",\"name\":\"name\",\"screen_name\":\"screen_name\",\"location\":\"\",\"url\":null,\"description\":null,\"protected\":false,\"followers_count\":136,\"friends_count\":0,\"listed_count\":1,\"created_at\":\"Fri Nov 20 19:29:02 +0000 2009\",\"favourites_count\":0,\"utc_offset\":null,\"time_zone\":null,\"geo_enabled\":false,\"verified\":false,\"statuses_count\":1793,\"lang\":\"en\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"C0DEED\",\"profile_background_image_url\":\"http:\\/\\/profile_background_image_url.png\",\"profile_background_image_url_https\":\"https:\\/\\/profile
 _background_image_url_https.png\",\"profile_background_tile\":false,\"profile_image_url\":\"http:\\/\\/profile_image_url.jpg\",\"profile_image_url_https\":\"https:\\/\\/profile_image_url_https.jpg\",\"profile_link_color\":\"0084B4\",\"profile_sidebar_border_color\":\"C0DEED\",\"profile_sidebar_fill_color\":\"DDEEF6\",\"profile_text_color\":\"333333\",\"profile_use_background_image\":true,\"default_profile\":true,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweet_count\":0,\"favorite_count\":0,\"entities\":{\"hashtags\":[],\"symbols\":[],\"urls\":[{\"url\":\"http:\\/\\/url\",\"expanded_url\":\"http:\\/\\/expanded_url\",\"display_url\":\"display_url\",\"indices\":[118,140]}],\"user_mentions\":[]},\"favorited\":false,\"retweeted\":false,\"possibly_sensitive\":false,\"filter_level\":\"medium\",\"lang\":\"en\"}\n";
+  private String retweetJson = "{\"created_at\":\"Wed Dec 11 22:27:34 +0000 2013\",\"id\":23456,\"id_str\":\"23456\",\"text\":\"text\",\"source\":\"web\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":163149656,\"id_str\":\"34567\",\"name\":\"name\",\"screen_name\":\"screen_name\",\"location\":\"location\",\"url\":\"http:\\/\\/www.youtube.com\\/watch?v=url\",\"description\":\"description\\u00ed\",\"protected\":false,\"followers_count\":41,\"friends_count\":75,\"listed_count\":2,\"created_at\":\"Mon Jul 05 17:35:49 +0000 2010\",\"favourites_count\":4697,\"utc_offset\":-10800,\"time_zone\":\"Buenos Aires\",\"geo_enabled\":false,\"verified\":false,\"statuses_count\":5257,\"lang\":\"es\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"C4A64B\",\"profile_background_image_url\":\"http:\\/\\/a0.twimg.com
 \\/profile_background_images\\/12345\\/12345.jpeg\",\"profile_background_image_url_https\":\"https:\\/\\/si0.twimg.com\\/profile_background_images\\/12345\\/12345.jpeg\",\"profile_background_tile\":true,\"profile_image_url\":\"http:\\/\\/pbs.twimg.com\\/profile_images\\/12345\\/12345.jpeg\",\"profile_image_url_https\":\"https:\\/\\/pbs.twimg.com\\/profile_images\\/12345\\/12345.jpeg\",\"profile_banner_url\":\"https:\\/\\/pbs.twimg.com\\/profile_banners\\/12345\\/12345\",\"profile_link_color\":\"BF415A\",\"profile_sidebar_border_color\":\"000000\",\"profile_sidebar_fill_color\":\"B17CED\",\"profile_text_color\":\"3D1957\",\"profile_use_background_image\":true,\"default_profile\":false,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweeted_status\":{\"created_at\":\"Wed Dec 11 22:25:06 +0000 2013\",\"id\":34567,\"id_str\":\"34567\",\"text\":\"text\",\"sou
 rce\":\"source\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":34567,\"id_str\":\"34567\",\"name\":\"name\",\"screen_name\":\"screen_name\",\"location\":\"\",\"url\":\"http:\\/\\/www.web.com\",\"description\":\"description\",\"protected\":false,\"followers_count\":34307,\"friends_count\":325,\"listed_count\":361,\"created_at\":\"Fri Apr 13 19:00:11 +0000 2012\",\"favourites_count\":44956,\"utc_offset\":3600,\"time_zone\":\"Madrid\",\"geo_enabled\":false,\"verified\":false,\"statuses_count\":24011,\"lang\":\"es\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"000000\",\"profile_background_image_url\":\"http:\\/\\/profile_background_image_url.jpeg\",\"profile_background_image_url_https\":\"https:\\/\\/si0.twimg.com\\/profile_background_images\\/34567\\/34567.jpeg\",\"profile_background_tile\":fals
 e,\"profile_image_url\":\"http:\\/\\/pbs.twimg.com\\/profile_images\\/34567\\/34567.gif\",\"profile_image_url_https\":\"https:\\/\\/pbs.twimg.com\\/profile_images\\/34567\\/34567.gif\",\"profile_banner_url\":\"https:\\/\\/pbs.twimg.com\\/profile_banners\\/34567\\/34567\",\"profile_link_color\":\"FF00E1\",\"profile_sidebar_border_color\":\"FFFFFF\",\"profile_sidebar_fill_color\":\"F3F3F3\",\"profile_text_color\":\"333333\",\"profile_use_background_image\":true,\"default_profile\":false,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweet_count\":9,\"favorite_count\":6,\"entities\":{\"hashtags\":[],\"symbols\":[],\"urls\":[],\"user_mentions\":[]},\"favorited\":false,\"retweeted\":false,\"lang\":\"es\"},\"retweet_count\":0,\"favorite_count\":0,\"entities\":{\"hashtags\":[],\"symbols\":[],\"urls\":[],\"user_mentions\":[{\"screen_name\":\"screen_name\",\"nam
 e\":\"name emocional\",\"id\":45678,\"id_str\":\"45678\",\"indices\":[3,14]}]},\"favorited\":false,\"retweeted\":false,\"filter_level\":\"medium\",\"lang\":\"es\"}\n";
 
-    @Test
-    public void testConvertTweet() throws Exception  {
-        Tweet tweet = mapper.readValue(tweetJson, Tweet.class);
-        List<Activity> activityList = activityConverterUtil.convert(tweet);
-        Assert.assertTrue(activityList.size() == 1);
-        Activity activity = activityList.get(0);
-        if( !ActivityUtil.isValid(activity) )
-            Assert.fail();
+  @Test
+  public void testConvertTweet() throws Exception  {
+    Tweet tweet = mapper.readValue(tweetJson, Tweet.class);
+    List<Activity> activityList = activityConverterUtil.convert(tweet);
+    Assert.assertTrue(activityList.size() == 1);
+    Activity activity = activityList.get(0);
+    if ( !ActivityUtil.isValid(activity) ) {
+      Assert.fail();
     }
-
-    @Test
-    public void testConvertRetweet() throws Exception  {
-        Retweet retweet = mapper.readValue(retweetJson, Retweet.class);
-        List<Activity> activityList = activityConverterUtil.convert(retweet);
-        Assert.assertTrue(activityList.size() == 1);
-        Activity activity = activityList.get(0);
-        if( !ActivityUtil.isValid(activity) )
-            Assert.fail();
+  }
+
+  @Test
+  public void testConvertRetweet() throws Exception  {
+    Retweet retweet = mapper.readValue(retweetJson, Retweet.class);
+    List<Activity> activityList = activityConverterUtil.convert(retweet);
+    Assert.assertTrue(activityList.size() == 1);
+    Activity activity = activityList.get(0);
+    if ( !ActivityUtil.isValid(activity) ) {
+      Assert.fail();
     }
-
-    @Test
-    public void testConvertDelete() throws Exception  {
-        Delete delete = mapper.readValue(retweetJson, Delete.class);
-        List<Activity> activityList = activityConverterUtil.convert(delete);
-        Assert.assertTrue(activityList.size() == 1);
-        Activity activity = activityList.get(0);
-        if( !ActivityUtil.isValid(activity) )
-            Assert.fail();
+  }
+
+  @Test
+  public void testConvertDelete() throws Exception  {
+    Delete delete = mapper.readValue(retweetJson, Delete.class);
+    List<Activity> activityList = activityConverterUtil.convert(delete);
+    Assert.assertTrue(activityList.size() == 1);
+    Activity activity = activityList.get(0);
+    if ( !ActivityUtil.isValid(activity) ) {
+      Assert.fail();
     }
-
-    @Test
-    public void testConvertFollow() throws Exception {
-        Follow follow = mapper.readValue(retweetJson, Follow.class);
-        List<Activity> activityList = activityConverterUtil.convert(follow);
-        Assert.assertTrue(activityList.size() == 1);
-        Activity activity = activityList.get(0);
-        if( !ActivityUtil.isValid(activity) )
-            Assert.fail();
+  }
+
+  @Test
+  public void testConvertFollow() throws Exception {
+    Follow follow = mapper.readValue(retweetJson, Follow.class);
+    List<Activity> activityList = activityConverterUtil.convert(follow);
+    Assert.assertTrue(activityList.size() == 1);
+    Activity activity = activityList.get(0);
+    if ( !ActivityUtil.isValid(activity) ) {
+      Assert.fail();
     }
-
-    @Test
-    public void testConvertTweetString() {
-        List<Activity> activityList = activityConverterUtil.convert(tweetJson);
-        Assert.assertTrue(activityList.size() == 1);
-        Activity activity = activityList.get(0);
-        if( !ActivityUtil.isValid(activity) )
-            Assert.fail();
+  }
+
+  @Test
+  public void testConvertTweetString() {
+    List<Activity> activityList = activityConverterUtil.convert(tweetJson);
+    Assert.assertTrue(activityList.size() == 1);
+    Activity activity = activityList.get(0);
+    if ( !ActivityUtil.isValid(activity) ) {
+      Assert.fail();
     }
-
-    @Test
-    public void testConvertRetweetString() {
-        List<Activity> activityList = activityConverterUtil.convert(retweetJson);
-        Assert.assertTrue(activityList.size() == 1);
-        Activity activity = activityList.get(0);
-        if( !ActivityUtil.isValid(activity) )
-            Assert.fail();
+  }
+
+  @Test
+  public void testConvertRetweetString() {
+    List<Activity> activityList = activityConverterUtil.convert(retweetJson);
+    Assert.assertTrue(activityList.size() == 1);
+    Activity activity = activityList.get(0);
+    if ( !ActivityUtil.isValid(activity) ) {
+      Assert.fail();
     }
-
-    @Test
-    public void testConvertDeleteString() {
-        String deleteJson = "{\"delete\":{\"status\":{\"id\":56789,\"user_id\":67890,\"id_str\":\"56789\",\"user_id_str\":\"67890\"}}}\n";
-        List<Activity> activityList = activityConverterUtil.convert(deleteJson);
-        Assert.assertTrue(activityList.size() == 1);
-        Activity activity = activityList.get(0);
-        if( !ActivityUtil.isValid(activity) )
-            Assert.fail();
+  }
+
+  @Test
+  public void testConvertDeleteString() {
+    String deleteJson = "{\"delete\":{\"status\":{\"id\":56789,\"user_id\":67890,\"id_str\":\"56789\",\"user_id_str\":\"67890\"}}}\n";
+    List<Activity> activityList = activityConverterUtil.convert(deleteJson);
+    Assert.assertTrue(activityList.size() == 1);
+    Activity activity = activityList.get(0);
+    if ( !ActivityUtil.isValid(activity) ) {
+      Assert.fail();
     }
-
-    @Test
-    public void testConvertFollowString() {
-        String followJson = "{\"follower\":{\"id\":12345},\"followee\":{\"id\":56789}}\n";
-        List<Activity> activityList = activityConverterUtil.convert(followJson);
-        Assert.assertTrue(activityList.size() == 1);
-        Activity activity = activityList.get(0);
-        if( !ActivityUtil.isValid(activity) )
-            Assert.fail();
+  }
+
+  @Test
+  public void testConvertFollowString() {
+    String followJson = "{\"follower\":{\"id\":12345},\"followee\":{\"id\":56789}}\n";
+    List<Activity> activityList = activityConverterUtil.convert(followJson);
+    Assert.assertTrue(activityList.size() == 1);
+    Activity activity = activityList.get(0);
+    if ( !ActivityUtil.isValid(activity) ) {
+      Assert.fail();
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterActivityObjectsConvertersTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterActivityObjectsConvertersTest.java b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterActivityObjectsConvertersTest.java
index c110670..4f2a4fd 100644
--- a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterActivityObjectsConvertersTest.java
+++ b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterActivityObjectsConvertersTest.java
@@ -18,11 +18,8 @@
 
 package org.apache.streams.twitter.test.utils;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
 import org.apache.streams.converter.ActivityObjectConverterProcessorConfiguration;
 import org.apache.streams.converter.ActivityObjectConverterUtil;
-import org.apache.streams.data.DocumentClassifier;
 import org.apache.streams.data.util.ActivityUtil;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.ActivityObject;
@@ -30,45 +27,48 @@ import org.apache.streams.twitter.converter.TwitterDateTimeFormat;
 import org.apache.streams.twitter.converter.TwitterDocumentClassifier;
 import org.apache.streams.twitter.converter.TwitterJsonUserActivityObjectConverter;
 import org.apache.streams.twitter.pojo.User;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Lists;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.ArrayList;
-
 /**
  * Tests {org.apache.streams.twitter.converter.*}
  */
 public class TwitterActivityObjectsConvertersTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(TwitterActivityObjectsConvertersTest.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterActivityObjectsConvertersTest.class);
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
 
-    private ActivityObjectConverterProcessorConfiguration activityObjectConverterProcessorConfiguration =
-            new ActivityObjectConverterProcessorConfiguration()
-                .withClassifiers(Lists.newArrayList(new TwitterDocumentClassifier()))
-                .withConverters(Lists.newArrayList(new TwitterJsonUserActivityObjectConverter()));
+  private ActivityObjectConverterProcessorConfiguration activityObjectConverterProcessorConfiguration =
+      new ActivityObjectConverterProcessorConfiguration()
+          .withClassifiers(Lists.newArrayList(new TwitterDocumentClassifier()))
+          .withConverters(Lists.newArrayList(new TwitterJsonUserActivityObjectConverter()));
 
-    private ActivityObjectConverterUtil activityObjectConverterUtil = ActivityObjectConverterUtil.getInstance(activityObjectConverterProcessorConfiguration);
+  private ActivityObjectConverterUtil activityObjectConverterUtil = ActivityObjectConverterUtil.getInstance(activityObjectConverterProcessorConfiguration);
 
-    private String userJson = "{\"id\":1663018644,\"id_str\":\"1663018644\",\"name\":\"M.R. Clark\",\"screen_name\":\"cantennisfan\",\"location\":\"\",\"url\":null,\"description\":null,\"protected\":false,\"verified\":false,\"followers_count\":0,\"friends_count\":5,\"listed_count\":0,\"favourites_count\":2,\"statuses_count\":72,\"created_at\":\"Sun Aug 11 17:23:47 +0000 2013\",\"utc_offset\":-18000,\"time_zone\":\"Eastern Time (US & Canada)\",\"geo_enabled\":false,\"lang\":\"en\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"C0DEED\",\"profile_background_image_url\":\"http://abs.twimg.com/images/themes/theme1/bg.png\",\"profile_background_image_url_https\":\"https://abs.twimg.com/images/themes/theme1/bg.png\",\"profile_background_tile\":false,\"profile_link_color\":\"0084B4\",\"profile_sidebar_border_color\":\"C0DEED\",\"profile_sidebar_fill_color\":\"DDEEF6\",\"profile_text_color\":\"333333\",\"profile_use_background_image\":true,\"profile_im
 age_url\":\"http://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png\",\"profile_image_url_https\":\"https://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png\",\"default_profile\":true,\"default_profile_image\":true,\"following\":null,\"follow_request_sent\":null,\"notifications\":null,\"status\":{\"created_at\":\"Thu Jan 01 14:11:48 +0000 2015\",\"id\":550655634706669568,\"id_str\":\"550655634706669568\",\"text\":\"CBC Media Centre - CBC - Air Farce New Year's Eve 2014/2015: http://t.co/lMlL9VbC5e\",\"source\":\"<a href=\\\"https://dev.twitter.com/docs/tfw\\\" rel=\\\"nofollow\\\">Twitter for Websites</a>\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweet_count\":0,\"favorite_count\":0,\"entities\":{\"hashtags\":[],\"trends\":
 [],\"urls\":[{\"url\":\"http://t.co/lMlL9VbC5e\",\"expanded_url\":\"http://www.cbc.ca/mediacentre/air-farce-new-years-eve-20142015.html#.VKVVarDhVxR.twitter\",\"display_url\":\"cbc.ca/mediacentre/ai\u2026\",\"indices\":[61,83]}],\"user_mentions\":[],\"symbols\":[]},\"favorited\":false,\"retweeted\":false,\"possibly_sensitive\":false,\"filter_level\":\"medium\",\"lang\":\"en\",\"timestamp_ms\":\"1420121508658\"}}\n";
+  private String userJson = "{\"id\":1663018644,\"id_str\":\"1663018644\",\"name\":\"M.R. Clark\",\"screen_name\":\"cantennisfan\",\"location\":\"\",\"url\":null,\"description\":null,\"protected\":false,\"verified\":false,\"followers_count\":0,\"friends_count\":5,\"listed_count\":0,\"favourites_count\":2,\"statuses_count\":72,\"created_at\":\"Sun Aug 11 17:23:47 +0000 2013\",\"utc_offset\":-18000,\"time_zone\":\"Eastern Time (US & Canada)\",\"geo_enabled\":false,\"lang\":\"en\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"C0DEED\",\"profile_background_image_url\":\"http://abs.twimg.com/images/themes/theme1/bg.png\",\"profile_background_image_url_https\":\"https://abs.twimg.com/images/themes/theme1/bg.png\",\"profile_background_tile\":false,\"profile_link_color\":\"0084B4\",\"profile_sidebar_border_color\":\"C0DEED\",\"profile_sidebar_fill_color\":\"DDEEF6\",\"profile_text_color\":\"333333\",\"profile_use_background_image\":true,\"profile_imag
 e_url\":\"http://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png\",\"profile_image_url_https\":\"https://abs.twimg.com/sticky/default_profile_images/default_profile_0_normal.png\",\"default_profile\":true,\"default_profile_image\":true,\"following\":null,\"follow_request_sent\":null,\"notifications\":null,\"status\":{\"created_at\":\"Thu Jan 01 14:11:48 +0000 2015\",\"id\":550655634706669568,\"id_str\":\"550655634706669568\",\"text\":\"CBC Media Centre - CBC - Air Farce New Year's Eve 2014/2015: http://t.co/lMlL9VbC5e\",\"source\":\"<a href=\\\"https://dev.twitter.com/docs/tfw\\\" rel=\\\"nofollow\\\">Twitter for Websites</a>\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweet_count\":0,\"favorite_count\":0,\"entities\":{\"hashtags\":[],\"trends\":[]
 ,\"urls\":[{\"url\":\"http://t.co/lMlL9VbC5e\",\"expanded_url\":\"http://www.cbc.ca/mediacentre/air-farce-new-years-eve-20142015.html#.VKVVarDhVxR.twitter\",\"display_url\":\"cbc.ca/mediacentre/ai\u2026\",\"indices\":[61,83]}],\"user_mentions\":[],\"symbols\":[]},\"favorited\":false,\"retweeted\":false,\"possibly_sensitive\":false,\"filter_level\":\"medium\",\"lang\":\"en\",\"timestamp_ms\":\"1420121508658\"}}\n";
 
-    @Test
-    public void testConvertUser() throws Exception {
-        User user = mapper.readValue(userJson, User.class);
-        ActivityObject activityObject = activityObjectConverterUtil.convert(user);
-        assert( activityObject != null );
-        if( !ActivityUtil.isValid(activityObject) )
-            Assert.fail();
+  @Test
+  public void testConvertUser() throws Exception {
+    User user = mapper.readValue(userJson, User.class);
+    ActivityObject activityObject = activityObjectConverterUtil.convert(user);
+    assert ( activityObject != null );
+    if ( !ActivityUtil.isValid(activityObject) ) {
+      Assert.fail();
     }
+  }
 
-    @Test
-    public void testConvertUserString() {
-        ActivityObject activityObject = activityObjectConverterUtil.convert(userJson);
-        assert( activityObject != null );
-        if( !ActivityUtil.isValid(activityObject) )
-            Assert.fail();
+  @Test
+  public void testConvertUserString() {
+    ActivityObject activityObject = activityObjectConverterUtil.convert(userJson);
+    assert ( activityObject != null );
+    if ( !ActivityUtil.isValid(activityObject) ) {
+      Assert.fail();
     }
+  }
 }


[30/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/serializer/FacebookActivityUtil.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/serializer/FacebookActivityUtil.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/serializer/FacebookActivityUtil.java
index 2cf2a7c..93ac199 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/serializer/FacebookActivityUtil.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/serializer/FacebookActivityUtil.java
@@ -18,11 +18,6 @@
 
 package org.apache.streams.facebook.serializer;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Joiner;
-import com.google.common.base.Optional;
-import com.google.common.collect.Lists;
 import org.apache.streams.data.util.ActivityUtil;
 import org.apache.streams.exceptions.ActivitySerializerException;
 import org.apache.streams.facebook.Cover;
@@ -38,6 +33,13 @@ import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Image;
 import org.apache.streams.pojo.json.Provider;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Joiner;
+import com.google.common.base.Optional;
+import com.google.common.collect.Lists;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -46,158 +48,163 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+/**
+ * FacebookActivityUtil helps convert facebook data to activity formats.
+ */
 public class FacebookActivityUtil {
-    private static final Logger LOGGER = LoggerFactory.getLogger(FacebookActivityUtil.class);
-
-    /**
-     * Updates the given Activity object with the values from the Page
-     * @param page the object to use as the source
-     * @param activity the target of the updates.  Will receive all values from the Page.
-     * @throws org.apache.streams.exceptions.ActivitySerializerException
-     */
-    public static void updateActivity(Page page, Activity activity) throws ActivitySerializerException {
-        activity.setActor(buildActor(page));
-        activity.setId(null);
-        activity.setProvider(getProvider());
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookActivityUtil.class);
+
+  /**
+   * Updates the given Activity object with the values from the Page.
+   * @param page the object to use as the source
+   * @param activity the target of the updates.  Will receive all values from the Page.
+   * @throws ActivitySerializerException
+   */
+  public static void updateActivity(Page page, Activity activity) throws ActivitySerializerException {
+    activity.setActor(buildActor(page));
+    activity.setId(null);
+    activity.setProvider(getProvider());
+  }
+
+  /**
+   * Updates the given Activity object with the values from the Post.
+   * @param post post
+   * @param activity activity
+   * @throws ActivitySerializerException
+   */
+  public static void updateActivity(Post post, Activity activity) throws ActivitySerializerException {
+    activity.setActor(buildActor(post));
+    activity.setId(formatId(post.getId()));
+    activity.setProvider(getProvider());
+    activity.setUpdated(post.getUpdatedTime());
+    activity.setPublished(post.getCreatedTime());
+
+    if (post.getLink() != null && post.getLink().length() > 0) {
+      List<String> links = new ArrayList<>();
+      links.add(post.getLink());
+      activity.setLinks(links);
     }
 
-    /**
-     * Updates the given Activity object with the values from the Post
-     * @param post
-     * @param activity
-     * @throws ActivitySerializerException
-     */
-    public static void updateActivity(Post post, Activity activity) throws ActivitySerializerException {
-        activity.setActor(buildActor(post));
-        activity.setId(formatId(post.getId()));
-        activity.setProvider(getProvider());
-        activity.setUpdated(post.getUpdatedTime());
-        activity.setPublished(post.getCreatedTime());
-
-        if(post.getLink() != null && post.getLink().length() > 0) {
-            List<String> links = new ArrayList<>();
-            links.add(post.getLink());
-            activity.setLinks(links);
-        }
-
-        activity.setContent(post.getMessage());
-
-        activity.setVerb("post");
-        activity.setObject(buildObject(post));
-        buildExtensions(activity, post);
+    activity.setContent(post.getMessage());
+
+    activity.setVerb("post");
+    activity.setObject(buildObject(post));
+    buildExtensions(activity, post);
+  }
+
+  /**
+   * Builds out the {@link org.apache.streams.pojo.json.ActivityObject} from the given {@link Post}.
+   * @param post
+   * @return {@link org.apache.streams.pojo.json.ActivityObject}
+   */
+  public static ActivityObject buildObject(Post post) {
+    ActivityObject activityObject = new ActivityObject();
+
+    try {
+      activityObject.setContent(post.getMessage());
+      activityObject.setPublished(post.getCreatedTime());
+      activityObject.setUpdated(post.getUpdatedTime());
+      activityObject.setDisplayName(post.getFrom().getName());
+      activityObject.setId(formatId(post.getId()));
+      activityObject.setObjectType(post.getType());
+      activityObject.setUrl(post.getLink());
+
+      if (activityObject.getObjectType().equals("photo")) {
+        Image image = new Image();
+        image.setUrl(activityObject.getUrl());
+        activityObject.setImage(image);
+      }
+    } catch (Exception ex) {
+      LOGGER.error("Exception while trying to build Activity object for post: {}, exception: {}", post, ex);
     }
 
-    /**
-     * Builds out the {@link org.apache.streams.pojo.json.ActivityObject} from the given {@link Post}
-     * @param post
-     * @return {@link org.apache.streams.pojo.json.ActivityObject}
-     */
-    public static ActivityObject buildObject(Post post) {
-        ActivityObject activityObject = new ActivityObject();
-
-        try {
-            activityObject.setContent(post.getMessage());
-            activityObject.setPublished(post.getCreatedTime());
-            activityObject.setUpdated(post.getUpdatedTime());
-            activityObject.setDisplayName(post.getFrom().getName());
-            activityObject.setId(formatId(post.getId()));
-            activityObject.setObjectType(post.getType());
-            activityObject.setUrl(post.getLink());
-
-            if(activityObject.getObjectType().equals("photo")) {
-                Image image = new Image();
-                image.setUrl(activityObject.getUrl());
-                activityObject.setImage(image);
-            }
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to build Activity object for post: {}, exception: {}", post, e);
-        }
-
-        return activityObject;
+    return activityObject;
+  }
+
+  /**
+   * Gets the common facebook {@link org.apache.streams.pojo.json.Provider} object.
+   * @return a provider object representing Facebook
+   */
+  public static Provider getProvider() {
+    Provider provider = new Provider();
+    provider.setId("id:providers:facebook");
+    provider.setDisplayName("Facebook");
+
+    return provider;
+  }
+
+  /**
+   * Builds the activity {@link org.apache.streams.pojo.json.ActivityObject} actor from the Page.
+   * @param page the object to use as the source
+   * @return a valid Actor populated from the Page
+   */
+  public static ActivityObject buildActor(Page page) {
+    ActivityObject actor = new ActivityObject();
+    actor.setId(formatId(
+        Optional.fromNullable(
+            page.getId())
+            .or(Optional.of(page.getId().toString()))
+            .orNull()
+    ));
+
+    actor.setDisplayName(page.getName());
+    actor.setAdditionalProperty("handle", page.getUsername());
+    actor.setSummary(page.getAbout());
+
+    if (page.getLink() != null) {
+      actor.setUrl(page.getLink());
     }
 
-    /**
-     * Gets the common facebook {@link org.apache.streams.pojo.json.Provider} object
-     * @return a provider object representing Facebook
-     */
-    public static Provider getProvider() {
-        Provider provider = new Provider();
-        provider.setId("id:providers:facebook");
-        provider.setDisplayName("Facebook");
+    Image profileImage = new Image();
+    Cover cover = page.getCover();
 
-        return provider;
+    if (cover != null) {
+      profileImage.setUrl(cover.getSource());
     }
-
-    /**
-     * Builds the activity {@link org.apache.streams.pojo.json.ActivityObject} actor from the Page
-     * @param page the object to use as the source
-     * @return a valid Actor populated from the Page
-     */
-    public static ActivityObject buildActor(Page page) {
-        ActivityObject actor = new ActivityObject();
-        actor.setId(formatId(
-                Optional.fromNullable(
-                        page.getId())
-                        .or(Optional.of(page.getId().toString()))
-                        .orNull()
-        ));
-
-        actor.setDisplayName(page.getName());
-        actor.setAdditionalProperty("handle", page.getUsername());
-        actor.setSummary(page.getAbout());
-
-        if (page.getLink()!=null){
-            actor.setUrl(page.getLink());
-        }
-
-        Image profileImage = new Image();
-        Cover cover = page.getCover();
-
-        if(cover != null)
-            profileImage.setUrl(cover.getSource());
-        actor.setImage(profileImage);
-
-        buildExtensions(actor, page);
-
-        return actor;
+    actor.setImage(profileImage);
+
+    buildExtensions(actor, page);
+
+    return actor;
+  }
+
+  /**
+   * Builds an {@link org.apache.streams.pojo.json.ActivityObject} object from the {@link Post}.
+   * @param post post
+   * @return {@link org.apache.streams.pojo.json.ActivityObject}
+   */
+  public static ActivityObject buildActor(Post post) {
+    ActivityObject actor = new ActivityObject();
+
+    try {
+      actor.setId(formatId(
+          Optional.fromNullable(
+              post.getFrom().getId())
+              .or(Optional.of(post.getFrom().getId()))
+              .orNull()
+      ));
+
+      actor.setDisplayName(post.getFrom().getName());
+      actor.setAdditionalProperty("handle", post.getFrom().getName());
+    } catch (Exception ex) {
+      LOGGER.error("Exception trying to build actor for Post: {}, {}", post, ex);
     }
 
-    /**
-     * Builds an {@link org.apache.streams.pojo.json.ActivityObject} object from the {@link Post}
-     * @param post
-     * @return {@link org.apache.streams.pojo.json.ActivityObject}
-     */
-    public static ActivityObject buildActor(Post post) {
-        ActivityObject actor = new ActivityObject();
-
-        try {
-            actor.setId(formatId(
-                    Optional.fromNullable(
-                            post.getFrom().getId())
-                            .or(Optional.of(post.getFrom().getId()))
-                            .orNull()
-            ));
-
-            actor.setDisplayName(post.getFrom().getName());
-            actor.setAdditionalProperty("handle", post.getFrom().getName());
-        } catch (Exception e) {
-            LOGGER.error("Exception trying to build actor for Post: {}, {}", post, e);
-        }
-
-        return actor;
-    }
+    return actor;
+  }
 
-    /**
-     * Builds the actor extensions given the page object
-     * @param actor
-     * @param page
-     */
-    public static void buildExtensions(ActivityObject actor, Page page) {
-        Map<String, Object> extensions = new HashMap<>();
-        Location location = page.getLocation();
+  /**
+   * Builds the actor extensions given the page object.
+   * @param actor actor
+   * @param page page
+   */
+  public static void buildExtensions(ActivityObject actor, Page page) {
+    Map<String, Object> extensions = new HashMap<>();
+    Location location = page.getLocation();
 
-        if(location != null)
-            extensions.put("location", page.getLocation().toString());
+    if (location != null) {
+      extensions.put("location", page.getLocation().toString());
+    }
 
         extensions.put("favorites", page.getTalkingAboutCount());
         extensions.put("followers", page.getFanCount());

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/serializer/FacebookStreamsPostSerializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/serializer/FacebookStreamsPostSerializer.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/serializer/FacebookStreamsPostSerializer.java
index f829b08..772af83 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/serializer/FacebookStreamsPostSerializer.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/serializer/FacebookStreamsPostSerializer.java
@@ -27,52 +27,63 @@ import org.apache.streams.pojo.json.Provider;
 
 import java.util.List;
 
+
 /**
  * Converts {@link org.apache.streams.facebook.Post} to {@link org.apache.streams.pojo.json.Activity}
  */
 public class FacebookStreamsPostSerializer implements ActivitySerializer<Post> {
 
-    private static final String FACEBOOK_STREAMS_ID = "id:provider:facebook";
-    private static final String ID_PREFIX = "id:facebook:";
-    private static final String PROVIDER_DISPLAY = "Facebook";
+  private static final String FACEBOOK_STREAMS_ID = "id:provider:facebook";
+  private static final String ID_PREFIX = "id:facebook:";
+  private static final String PROVIDER_DISPLAY = "Facebook";
 
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
 
-    @Override
-    public Post serialize(Activity deserialized) throws ActivitySerializerException {
-        return null;
-    }
+  @Override
+  public Post serialize(Activity deserialized) throws ActivitySerializerException {
+    return null;
+  }
 
-    @Override
-    public Activity deserialize(Post post) throws ActivitySerializerException {
-        Activity activity = new Activity();
-        activity.setActor(createActor(post));
+  @Override
+  public Activity deserialize(Post post) throws ActivitySerializerException {
+    Activity activity = new Activity();
+    activity.setActor(createActor(post));
 
-        activity.setId(post.getId());
-        activity.setContent(post.getMessage());
-        return null;
-    }
+    activity.setId(post.getId());
+    activity.setContent(post.getMessage());
+    return null;
+  }
 
-    @Override
-    public List<Activity> deserializeAll(List<Post> serializedList) {
-        return null;
-    }
+  @Override
+  public List<Activity> deserializeAll(List<Post> serializedList) {
+    return null;
+  }
 
-    public ActivityObject createActor(Post post) {
-        ActivityObject actor = new ActivityObject();
-        actor.setDisplayName(post.getFrom().getName());
-        actor.setId(ID_PREFIX+post.getFrom().getId());
-        return actor;
-    }
+  /**
+   * createActor.
+   * @param post post
+   * @return ActivityObject
+   */
+  public ActivityObject createActor(Post post) {
+    ActivityObject actor = new ActivityObject();
+    actor.setDisplayName(post.getFrom().getName());
+    actor.setId(ID_PREFIX + post.getFrom().getId());
+    return actor;
+  }
 
-    public Provider createProvider(Post post) {
-        Provider provider = new Provider();
-        provider.setId(FACEBOOK_STREAMS_ID);
-        provider.setDisplayName(PROVIDER_DISPLAY);
-        return provider;
-    }
+  /**
+   * Provider.
+   * @param post post
+   * @return Provider
+   */
+  public Provider createProvider(Post post) {
+    Provider provider = new Provider();
+    provider.setId(FACEBOOK_STREAMS_ID);
+    provider.setDisplayName(PROVIDER_DISPLAY);
+    return provider;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/FacebookEventClassifierTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/FacebookEventClassifierTest.java b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/FacebookEventClassifierTest.java
index 076aca9..9d7604a 100644
--- a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/FacebookEventClassifierTest.java
+++ b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/FacebookEventClassifierTest.java
@@ -21,37 +21,38 @@ package org.apache.streams.facebook.test;
 import org.apache.streams.facebook.Page;
 import org.apache.streams.facebook.Post;
 import org.apache.streams.facebook.provider.FacebookEventClassifier;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
 public class FacebookEventClassifierTest {
-    private String empty = "";
-    private String def = "{}";
-    private String post = "{\"metadata\":null,\"id\":\"687664828_10153082499914829\",\"from\":{\"metadata\":null,\"id\":\"687664828\",\"name\":\"Steve Blackmon\",\"category\":null,\"createdTime\":null},\"message\":null,\"picture\":\"https://fbexternal-a.akamaihd.net/app_full_proxy.php?app=184136951108&v=1&size=z&cksum=7f4094dff37cedd69072cd2c0b3728b7&src=https%3A%2F%2Fstatic.tripit.com%2Fimages%2Fplaces%2Fsantamonica.jpg%3Fv%3D2014-08-13\",\"link\":\"http://www.tripit.com/trip/show/id/130372979/traveler_fb_uid/687664828?us=fc&um=fa&un=fd\",\"name\":\"Steve is about to leave on a trip to Santa Monica, CA.\",\"caption\":\"Aug 2014 for 3 days\",\"description\":\"TripIt - Free online trip planner and free travel itinerary website for organizing vacations, group trips or business travel\",\"source\":null,\"icon\":\"https://fbcdn-photos-d-a.akamaihd.net/hphotos-ak-xpa1/t39.2080-0/851580_10151367869221109_1073679965_n.gif\",\"actions\":[{\"name\":\"Comment\",\"link\":\"https://www.facebook
 .com/687664828/posts/10153082499914829\"},{\"name\":\"Like\",\"link\":\"https://www.facebook.com/687664828/posts/10153082499914829\"},{\"name\":\"Plan a trip on TripIt\",\"link\":\"http://www.tripit.com/?us=fc&um=fa&un=fd\"}],\"privacy\":{\"value\":\"ALL_FRIENDS\",\"friends\":\"EMPTY\",\"description\":[\"Your friends\"]},\"type\":\"link\",\"sharesCount\":null,\"place\":null,\"statusType\":\"app_created_story\",\"story\":null,\"objectId\":null,\"application\":{\"id\":\"184136951108\",\"name\":\"TripIt\",\"description\":null,\"category\":null,\"company\":null,\"iconUrl\":null,\"subcategory\":null,\"link\":null,\"logoUrl\":null,\"dailyActiveUsers\":null,\"weeklyActiveUsers\":null,\"monthlyActiveUsers\":null,\"namespace\":\"tripitcom\",\"authDialogDataHelpUrl\":null,\"authDialogDescription\":null,\"authDialogHeadline\":null,\"authDialogPermsExplanation\":null,\"authReferralDefaultActivityPrivacy\":null,\"authReferralResponseType\":null,\"canvasUrl\":null,\"contactEmail\":null,\"createdT
 ime\":null,\"creatorUid\":null,\"deauthCallbackUrl\":null,\"iphoneAppStoreId\":null,\"hostingUrl\":null,\"mobileWebUrl\":null,\"pageTabDefaultName\":null,\"pageTabUrl\":null,\"privacyPolicyUrl\":null,\"secureCanvasUrl\":null,\"securePageTabUrl\":null,\"serverIpWhitelist\":null,\"termsOfServiceUrl\":null,\"userSupportEmail\":null,\"userSupportUrl\":null,\"websiteUrl\":null,\"canvasName\":null},\"createdTime\":\"2014-08-13T12:22:20.000+0000\",\"updatedTime\":\"2014-08-13T12:22:20.000+0000\",\"scheduledPublishTime\":null,\"targeting\":null,\"published\":null}";
-    private String page = "{\"metadata\":null,\"id\":\"142803045874943\",\"name\":\"Senator Angus S. King, Jr.\",\"category\":\"Government official\",\"createdTime\":null,\"link\":\"https://www.facebook.com/SenatorAngusSKingJr\",\"likes\":10246,\"location\":{\"street\":\"359 Dirksen Senate Office Building\",\"city\":\"Washington, District of Columbia\",\"state\":\"DC\",\"country\":\"United States\",\"zip\":\"20510\",\"latitude\":null,\"longitude\":null,\"text\":null},\"phone\":\"202-224-5344\",\"checkins\":0,\"picture\":null,\"cover\":{\"id\":null,\"source\":\"https://fbcdn-sphotos-g-a.akamaihd.net/hphotos-ak-xpa1/v/t1.0-9/10288792_321537751334804_8200105519500362465_n.jpg?oh=fbcde9b3e1e011dfa3e699628629bc53&oe=546FB617&__gda__=1416717487_3fa5781d7d9c3d58f2bc798a36ac6fc0\",\"offsetY\":9},\"website\":\"http://www.king.senate.gov\",\"talkingAboutCount\":5034,\"accessToken\":null,\"wereHereCount\":0,\"about\":\"Welcome to the official Facebook page of Senator Angus S. King, Jr. (I-ME).
 \\nhttp://king.senate.gov\\nhttps://twitter.com/SenAngusKing\\nhttps://www.youtube.com/SenatorAngusKing\",\"username\":\"SenatorAngusSKingJr\",\"published\":true,\"communityPage\":false}";
-
-
-    @Test(expected=IllegalArgumentException.class)
-    public void emptyJSONTest() {
-        Class inClass = FacebookEventClassifier.detectClass(empty);
-    }
-
-    @Test
-    public void defaultDetectTest() {
-        Class inClass = FacebookEventClassifier.detectClass(post);
-        assertEquals(inClass, Post.class);
-    }
-
-    @Test
-    public void postDetectTest() {
-        Class inClass = FacebookEventClassifier.detectClass(post);
-        assertEquals(inClass, Post.class);
-    }
-
-    @Test
-    public void pageDetectTest() {
-        Class inClass = FacebookEventClassifier.detectClass(page);
-        assertEquals(inClass, Page.class);
-    }
+
+  private String empty = "";
+  private String def = "{}";
+  private String post = "{\"metadata\":null,\"id\":\"687664828_10153082499914829\",\"from\":{\"metadata\":null,\"id\":\"687664828\",\"name\":\"Steve Blackmon\",\"category\":null,\"createdTime\":null},\"message\":null,\"picture\":\"https://fbexternal-a.akamaihd.net/app_full_proxy.php?app=184136951108&v=1&size=z&cksum=7f4094dff37cedd69072cd2c0b3728b7&src=https%3A%2F%2Fstatic.tripit.com%2Fimages%2Fplaces%2Fsantamonica.jpg%3Fv%3D2014-08-13\",\"link\":\"http://www.tripit.com/trip/show/id/130372979/traveler_fb_uid/687664828?us=fc&um=fa&un=fd\",\"name\":\"Steve is about to leave on a trip to Santa Monica, CA.\",\"caption\":\"Aug 2014 for 3 days\",\"description\":\"TripIt - Free online trip planner and free travel itinerary website for organizing vacations, group trips or business travel\",\"source\":null,\"icon\":\"https://fbcdn-photos-d-a.akamaihd.net/hphotos-ak-xpa1/t39.2080-0/851580_10151367869221109_1073679965_n.gif\",\"actions\":[{\"name\":\"Comment\",\"link\":\"https://www.facebook.c
 om/687664828/posts/10153082499914829\"},{\"name\":\"Like\",\"link\":\"https://www.facebook.com/687664828/posts/10153082499914829\"},{\"name\":\"Plan a trip on TripIt\",\"link\":\"http://www.tripit.com/?us=fc&um=fa&un=fd\"}],\"privacy\":{\"value\":\"ALL_FRIENDS\",\"friends\":\"EMPTY\",\"description\":[\"Your friends\"]},\"type\":\"link\",\"sharesCount\":null,\"place\":null,\"statusType\":\"app_created_story\",\"story\":null,\"objectId\":null,\"application\":{\"id\":\"184136951108\",\"name\":\"TripIt\",\"description\":null,\"category\":null,\"company\":null,\"iconUrl\":null,\"subcategory\":null,\"link\":null,\"logoUrl\":null,\"dailyActiveUsers\":null,\"weeklyActiveUsers\":null,\"monthlyActiveUsers\":null,\"namespace\":\"tripitcom\",\"authDialogDataHelpUrl\":null,\"authDialogDescription\":null,\"authDialogHeadline\":null,\"authDialogPermsExplanation\":null,\"authReferralDefaultActivityPrivacy\":null,\"authReferralResponseType\":null,\"canvasUrl\":null,\"contactEmail\":null,\"createdTim
 e\":null,\"creatorUid\":null,\"deauthCallbackUrl\":null,\"iphoneAppStoreId\":null,\"hostingUrl\":null,\"mobileWebUrl\":null,\"pageTabDefaultName\":null,\"pageTabUrl\":null,\"privacyPolicyUrl\":null,\"secureCanvasUrl\":null,\"securePageTabUrl\":null,\"serverIpWhitelist\":null,\"termsOfServiceUrl\":null,\"userSupportEmail\":null,\"userSupportUrl\":null,\"websiteUrl\":null,\"canvasName\":null},\"createdTime\":\"2014-08-13T12:22:20.000+0000\",\"updatedTime\":\"2014-08-13T12:22:20.000+0000\",\"scheduledPublishTime\":null,\"targeting\":null,\"published\":null}";
+  private String page = "{\"metadata\":null,\"id\":\"142803045874943\",\"name\":\"Senator Angus S. King, Jr.\",\"category\":\"Government official\",\"createdTime\":null,\"link\":\"https://www.facebook.com/SenatorAngusSKingJr\",\"likes\":10246,\"location\":{\"street\":\"359 Dirksen Senate Office Building\",\"city\":\"Washington, District of Columbia\",\"state\":\"DC\",\"country\":\"United States\",\"zip\":\"20510\",\"latitude\":null,\"longitude\":null,\"text\":null},\"phone\":\"202-224-5344\",\"checkins\":0,\"picture\":null,\"cover\":{\"id\":null,\"source\":\"https://fbcdn-sphotos-g-a.akamaihd.net/hphotos-ak-xpa1/v/t1.0-9/10288792_321537751334804_8200105519500362465_n.jpg?oh=fbcde9b3e1e011dfa3e699628629bc53&oe=546FB617&__gda__=1416717487_3fa5781d7d9c3d58f2bc798a36ac6fc0\",\"offsetY\":9},\"website\":\"http://www.king.senate.gov\",\"talkingAboutCount\":5034,\"accessToken\":null,\"wereHereCount\":0,\"about\":\"Welcome to the official Facebook page of Senator Angus S. King, Jr. (I-ME).\\
 nhttp://king.senate.gov\\nhttps://twitter.com/SenAngusKing\\nhttps://www.youtube.com/SenatorAngusKing\",\"username\":\"SenatorAngusSKingJr\",\"published\":true,\"communityPage\":false}";
+
+  @Test(expected=IllegalArgumentException.class)
+  public void emptyJSONTest() {
+    Class inClass = FacebookEventClassifier.detectClass(empty);
+  }
+
+  @Test
+  public void defaultDetectTest() {
+    Class inClass = FacebookEventClassifier.detectClass(post);
+    assertEquals(inClass, Post.class);
+  }
+
+  @Test
+  public void postDetectTest() {
+    Class inClass = FacebookEventClassifier.detectClass(post);
+    assertEquals(inClass, Post.class);
+  }
+
+  @Test
+  public void pageDetectTest() {
+    Class inClass = FacebookEventClassifier.detectClass(page);
+    assertEquals(inClass, Page.class);
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/SimplePageTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/SimplePageTest.java b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/SimplePageTest.java
index 45864ef..36d3b16 100644
--- a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/SimplePageTest.java
+++ b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/SimplePageTest.java
@@ -18,15 +18,17 @@
 
 package org.apache.streams.facebook.test;
 
-import com.fasterxml.jackson.databind.DeserializationFeature;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.facebook.Page;
 import org.apache.streams.facebook.api.FacebookPageActivitySerializer;
 import org.apache.streams.facebook.processor.FacebookTypeConverter;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
+
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -37,7 +39,9 @@ import java.io.BufferedReader;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 
-import static org.hamcrest.CoreMatchers.*;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.CoreMatchers.nullValue;
 import static org.junit.Assert.assertThat;
 
 /**
@@ -45,33 +49,33 @@ import static org.junit.Assert.assertThat;
  */
 @Ignore("ignore until test resources are available.")
 public class SimplePageTest {
-    private final static Logger LOGGER = LoggerFactory.getLogger(SimplePageTest.class);
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-    private ObjectNode event;
+  private static final Logger LOGGER = LoggerFactory.getLogger(SimplePageTest.class);
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private ObjectNode event;
 
     private static final String FACEBOOK_JSON= "{\"about\":\"The Facebook Page celebrates how our friends inspire us, support us, and help us discover the world when we connect.\",\"username\":\"facebook\",\"birthday\":\"02/04/2004\",\"category\":\"Product/Service\",\"can_checkin\":true,\"can_post\":false,\"checkins\":12,\"cover\":{\"id\":\"10154345508521729\",\"source\":\"https://scontent.xx.fbcdn.net/v/t1.0-9/s720x720/12573693_10154345508521729_8347370496501004621_n.png?oh=b75f9ec17e7e8d6c84658f5a1eb1f724&oe=58BFB505\",\"cover_id\":\"10154345508521729\",\"offset_x\":0,\"offset_y\":0},\"display_subtext\":\"12 people checked in here\",\"displayed_message_response_time\":\"AUTOMATIC\",\"engagement\":{\"count\":174813083,\"social_sentence\":\"You and 174M others like this.\"},\"fan_count\":174813083,\"founded\":\"February 4, 2004\",\"general_info\":\"Your ideas and suggestions help us to constantly improve Facebook\u2019s features. Let us know how we can improve your experience.  \\n \\n
 www.facebook.com/help/feedback\\n\",\"global_brand_root_id\":\"1499730620315598\",\"id\":\"20531316728\",\"is_community_page\":false,\"is_always_open\":false,\"is_permanently_closed\":false,\"is_published\":true,\"is_unclaimed\":false,\"is_webhooks_subscribed\":false,\"leadgen_tos_accepted\":false,\"link\":\"https://www.facebook.com/facebook/\",\"mission\":\"Founded in 2004, Facebook\u2019s mission is to give people the power to share and make the world more open and connected. People use Facebook to stay connected with friends and family, to discover what\u2019s going on in the world, and to share and express what matters to them.\",\"parking\":{\"lot\":0,\"street\":0,\"valet\":0},\"name\":\"Facebook\",\"name_with_location_descriptor\":\"Facebook\",\"overall_star_rating\":0,\"rating_count\":0,\"talking_about_count\":367210,\"voip_info\":{\"has_mobile_app\":false,\"has_permission\":false,\"is_callable\":false,\"is_callable_webrtc\":false,\"is_pushable\":false,\"reason_code\":1356044,\"rea
 son_description\":\"This person isn't available right now.\"},\"verification_status\":\"blue_verified\",\"website\":\"http://www.facebook.com\",\"were_here_count\":0,\"app_links\":{\"android\":[{\"app_name\":\"Facebook\",\"package\":\"com.facebook.katana\",\"url\":\"fb://page/20531316728\"}],\"ios\":[{\"app_name\":\"Facebook\",\"app_store_id\":\"284882215\",\"url\":\"fb://page/?id=20531316728\"}]},\"featured_video\":{\"updated_time\":\"2016-05-17T15:57:33+0000\",\"description\":\"Explore Grand Central Terminal and the stories that unfold there in the first film shot with the new Facebook Surround 360 camera. Watch the film in standard monoscopic 360 here, or find it in the Oculus Video app to watch in full 3D-360 with Gear VR.\",\"id\":\"10154659446236729\"},\"context\":{\"friends_who_like\":{\"summary\":{\"total_count\":0,\"social_sentence\":\"0 of your friends like this.\"},\"data\":[]},\"id\":\"b3Blbl9ncmFwaF9jb250ZAXh0OjIwNTMxMzE2NzI4\"},\"global_brand_page_name\":\"Facebook\",\
 "has_added_app\":false,\"is_verified\":true}\n";
 
-    private FacebookPageActivitySerializer facebookPageActivitySerializer = new FacebookPageActivitySerializer();
+  private FacebookPageActivitySerializer facebookPageActivitySerializer = new FacebookPageActivitySerializer();
 
-    @Before
-    public void setUp() throws Exception {
-        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
+  @Before
+  public void setUp() throws Exception {
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
 
-        InputStream is = SimplePageTest.class.getResourceAsStream("/testpage.json");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
+    InputStream is = SimplePageTest.class.getResourceAsStream("/testpage.json");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
 
-        event = null;
-        event = (ObjectNode) mapper.readTree(FACEBOOK_JSON);
-    }
+    event = null;
+    event = (ObjectNode) mapper.readTree(FACEBOOK_JSON);
+  }
 
-    @Test
-    public void TestSerialization() {
-        assertThat(event, is(not(nullValue())));
+  @Test
+  public void TestSerialization() {
+    assertThat(event, is(not(nullValue())));
 
-        Page page = mapper.convertValue(event, Page.class);
+    Page page = mapper.convertValue(event, Page.class);
 
         assertThat(page, is(not(nullValue())));
         assertThat(page.getAbout(), is(not(nullValue())));
@@ -79,25 +83,25 @@ public class SimplePageTest {
         assertThat(page.getTalkingAboutCount(), is(not(nullValue())));
     }
 
-    @Test
-    public void TestDeserialization() throws Exception {
-        Page page = mapper.convertValue(event, Page.class);
+  @Test
+  public void TestDeserialization() throws Exception {
+    Page page = mapper.convertValue(event, Page.class);
 
-        Activity activity = null;
-        activity = facebookPageActivitySerializer.deserialize(page);
+    Activity activity = null;
+    activity = facebookPageActivitySerializer.deserialize(page);
 
-        assertThat(activity, is(not(nullValue())));
+    assertThat(activity, is(not(nullValue())));
 
-        assertThat(activity.getActor(), is(not(nullValue())));
-        assertThat(activity.getActor().getId(), is(not(nullValue())));
-        assertThat(activity.getVerb(), is(not(nullValue())));
-        assertThat(activity.getProvider(), is(not(nullValue())));
-    }
+    assertThat(activity.getActor(), is(not(nullValue())));
+    assertThat(activity.getActor().getId(), is(not(nullValue())));
+    assertThat(activity.getVerb(), is(not(nullValue())));
+    assertThat(activity.getProvider(), is(not(nullValue())));
+  }
 
-    @Test
-    public void TestConverter() throws Exception {
-        FacebookTypeConverter converter = new FacebookTypeConverter(String.class, Activity.class);
-        converter.prepare(null);
-        converter.process(new StreamsDatum(FACEBOOK_JSON));
-    }
+  @Test
+  public void TestConverter() throws Exception {
+    FacebookTypeConverter converter = new FacebookTypeConverter(String.class, Activity.class);
+    converter.prepare(null);
+    converter.process(new StreamsDatum(FACEBOOK_JSON));
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/TestPage.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/TestPage.java b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/TestPage.java
index d3a00b6..620f31f 100644
--- a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/TestPage.java
+++ b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/TestPage.java
@@ -17,129 +17,129 @@
  */
 package org.apache.streams.facebook.test;
 
+import java.net.URL;
+import java.util.Date;
+
 import facebook4j.Cover;
 import facebook4j.Page;
 import facebook4j.Place;
 
-import java.net.URL;
-import java.util.Date;
-
 public class TestPage implements Page {
-    private String id;
-    private String name;
-
-    public TestPage(String id, String name) {
-        this.id = id;
-        this.name = name;
-    }
-
-    @Override
-    public String getId() {
-        return id;
-    }
-
-    @Override
-    public String getName() {
-        return name;
-    }
-
-    @Override
-    public URL getLink() {
-        return null;
-    }
-
-    @Override
-    public String getCategory() {
-        return null;
-    }
-
-    @Override
-    public Boolean isPublished() {
-        return null;
-    }
-
-    @Override
-    public Boolean canPost() {
-        return null;
-    }
-
-    @Override
-    public Integer getLikes() {
-        return null;
-    }
-
-    @Override
-    public Place.Location getLocation() {
-        return null;
-    }
-
-    @Override
-    public String getPhone() {
-        return null;
-    }
-
-    @Override
-    public Integer getCheckins() {
-        return null;
-    }
-
-    @Override
-    public URL getPicture() {
-        return null;
-    }
-
-    @Override
-    public Cover getCover() {
-        return null;
-    }
-
-    @Override
-    public String getWebsite() {
-        return null;
-    }
-
-    @Override
-    public String getCompanyOverview() {
-        return null;
-    }
-
-    @Override
-    public Integer getTalkingAboutCount() {
-        return null;
-    }
-
-    @Override
-    public String getAccessToken() {
-        return null;
-    }
-
-    @Override
-    public Boolean isCommunityPage() {
-        return null;
-    }
-
-    @Override
-    public Integer getWereHereCount() {
-        return null;
-    }
-
-    @Override
-    public Integer getFanCount() {
-        return null;
-    }
-
-    @Override
-    public Date getCreatedTime() {
-        return null;
-    }
-
-    @Override
-    public String getAbout() {
-        return null;
-    }
-
-    @Override
-    public String getUsername() {
-        return null;
-    }
+  private String id;
+  private String name;
+
+  public TestPage(String id, String name) {
+    this.id = id;
+    this.name = name;
+  }
+
+  @Override
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public String getName() {
+    return name;
+  }
+
+  @Override
+  public URL getLink() {
+    return null;
+  }
+
+  @Override
+  public String getCategory() {
+    return null;
+  }
+
+  @Override
+  public Boolean isPublished() {
+    return null;
+  }
+
+  @Override
+  public Boolean canPost() {
+    return null;
+  }
+
+  @Override
+  public Integer getLikes() {
+    return null;
+  }
+
+  @Override
+  public Place.Location getLocation() {
+    return null;
+  }
+
+  @Override
+  public String getPhone() {
+    return null;
+  }
+
+  @Override
+  public Integer getCheckins() {
+    return null;
+  }
+
+  @Override
+  public URL getPicture() {
+    return null;
+  }
+
+  @Override
+  public Cover getCover() {
+    return null;
+  }
+
+  @Override
+  public String getWebsite() {
+    return null;
+  }
+
+  @Override
+  public String getCompanyOverview() {
+    return null;
+  }
+
+  @Override
+  public Integer getTalkingAboutCount() {
+    return null;
+  }
+
+  @Override
+  public String getAccessToken() {
+    return null;
+  }
+
+  @Override
+  public Boolean isCommunityPage() {
+    return null;
+  }
+
+  @Override
+  public Integer getWereHereCount() {
+    return null;
+  }
+
+  @Override
+  public Integer getFanCount() {
+    return null;
+  }
+
+  @Override
+  public Date getCreatedTime() {
+    return null;
+  }
+
+  @Override
+  public String getAbout() {
+    return null;
+  }
+
+  @Override
+  public String getUsername() {
+    return null;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookActivityActorSerDeIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookActivityActorSerDeIT.java b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookActivityActorSerDeIT.java
index fff060a..7b50535 100644
--- a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookActivityActorSerDeIT.java
+++ b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookActivityActorSerDeIT.java
@@ -18,15 +18,17 @@
 
 package org.apache.streams.facebook.test.data;
 
+import org.apache.streams.facebook.Page;
+import org.apache.streams.facebook.api.FacebookPageActivitySerializer;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.pojo.json.Activity;
+
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Joiner;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.input.BoundedInputStream;
-import org.apache.streams.facebook.Page;
-import org.apache.streams.facebook.api.FacebookPageActivitySerializer;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -38,29 +40,29 @@ import java.io.InputStream;
  */
 public class FacebookActivityActorSerDeIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(FacebookActivityActorSerDeIT.class);
-    private FacebookPageActivitySerializer serializer = new FacebookPageActivitySerializer();
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookActivityActorSerDeIT.class);
+  private FacebookPageActivitySerializer serializer = new FacebookPageActivitySerializer();
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Test
-    public void Tests() throws Exception
-    {
-        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
+  @Test
+  public void Tests() throws Exception
+  {
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
 
-        InputStream is = FacebookActivityActorSerDeIT.class.getResourceAsStream("/testpage.json");
-        Joiner joiner = Joiner.on(" ").skipNulls();
-        is = new BoundedInputStream(is, 10000);
-        String json;
+    InputStream is = FacebookActivityActorSerDeIT.class.getResourceAsStream("/testpage.json");
+    Joiner joiner = Joiner.on(" ").skipNulls();
+    is = new BoundedInputStream(is, 10000);
+    String json;
 
-        json = joiner.join(IOUtils.readLines(is));
-        LOGGER.debug(json);
+    json = joiner.join(IOUtils.readLines(is));
+    LOGGER.debug(json);
 
-        Page page = mapper.readValue(json, Page.class);
+    Page page = mapper.readValue(json, Page.class);
 
-        Activity activity = serializer.deserialize(page);
+    Activity activity = serializer.deserialize(page);
 
-        LOGGER.debug(mapper.writeValueAsString(activity));
-    }
+    LOGGER.debug(mapper.writeValueAsString(activity));
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookActivitySerDeIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookActivitySerDeIT.java b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookActivitySerDeIT.java
index 3d9e3e1..8112d59 100644
--- a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookActivitySerDeIT.java
+++ b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookActivitySerDeIT.java
@@ -18,15 +18,17 @@
 
 package org.apache.streams.facebook.test.data;
 
+import org.apache.streams.facebook.Post;
 import org.apache.streams.facebook.api.FacebookPostActivitySerializer;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.pojo.json.Activity;
+
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Joiner;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.input.BoundedInputStream;
-import org.apache.streams.facebook.Post;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -39,35 +41,35 @@ import java.io.InputStream;
  */
 public class FacebookActivitySerDeIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(FacebookActivitySerDeIT.class);
-    private FacebookPostActivitySerializer serializer = new FacebookPostActivitySerializer();
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookActivitySerDeIT.class);
+  private FacebookPostActivitySerializer serializer = new FacebookPostActivitySerializer();
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Test
-    public void Tests()
-    {
-        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
+  @Test
+  public void Tests()
+  {
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
 
-        InputStream is = FacebookActivitySerDeIT.class.getResourceAsStream("/testpost.json");
-        Joiner joiner = Joiner.on(" ").skipNulls();
-        is = new BoundedInputStream(is, 10000);
-        String json;
+    InputStream is = FacebookActivitySerDeIT.class.getResourceAsStream("/testpost.json");
+    Joiner joiner = Joiner.on(" ").skipNulls();
+    is = new BoundedInputStream(is, 10000);
+    String json;
 
-        try {
-            json = joiner.join(IOUtils.readLines(is));
-            LOGGER.debug(json);
+    try {
+      json = joiner.join(IOUtils.readLines(is));
+      LOGGER.debug(json);
 
-            Post post = mapper.readValue(json, Post.class);
+      Post post = mapper.readValue(json, Post.class);
 
-            Activity activity = serializer.deserialize(post);
+      Activity activity = serializer.deserialize(post);
 
-            LOGGER.debug(mapper.writeValueAsString(activity));
+      LOGGER.debug(mapper.writeValueAsString(activity));
 
-        } catch( Exception e ) {
-            LOGGER.error("Exception: ", e);
-            Assert.fail();
-        }
+    } catch( Exception e ) {
+      LOGGER.error("Exception: ", e);
+      Assert.fail();
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookPageSerDeIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookPageSerDeIT.java b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookPageSerDeIT.java
index 3499a67..2a2ef6e 100644
--- a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookPageSerDeIT.java
+++ b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookPageSerDeIT.java
@@ -18,13 +18,15 @@
 
 package org.apache.streams.facebook.test.data;
 
+import org.apache.streams.facebook.Page;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Joiner;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.input.BoundedInputStream;
-import org.apache.streams.facebook.Page;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -37,40 +39,40 @@ import java.io.InputStream;
  */
 public class FacebookPageSerDeIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(FacebookPageSerDeIT.class);
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookPageSerDeIT.class);
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Test
-    public void Tests()
-    {
-        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
+  @Test
+  public void Tests()
+  {
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
 
-        InputStream is = FacebookPageSerDeIT.class.getResourceAsStream("/testpage.json");
-        Joiner joiner = Joiner.on(" ").skipNulls();
-        is = new BoundedInputStream(is, 10000);
-        String json;
+    InputStream is = FacebookPageSerDeIT.class.getResourceAsStream("/testpage.json");
+    Joiner joiner = Joiner.on(" ").skipNulls();
+    is = new BoundedInputStream(is, 10000);
+    String json;
 
-        try {
-            json = joiner.join(IOUtils.readLines(is));
-            LOGGER.debug(json);
+    try {
+      json = joiner.join(IOUtils.readLines(is));
+      LOGGER.debug(json);
 
-            Page ser = mapper.readValue(json, Page.class);
+      Page ser = mapper.readValue(json, Page.class);
 
-            String de = mapper.writeValueAsString(ser);
+      String de = mapper.writeValueAsString(ser);
 
-            LOGGER.debug(de);
+      LOGGER.debug(de);
 
-            Page serde = mapper.readValue(de, Page.class);
+      Page serde = mapper.readValue(de, Page.class);
 
-            Assert.assertEquals(ser, serde);
+      Assert.assertEquals(ser, serde);
 
-            LOGGER.debug(mapper.writeValueAsString(serde));
+      LOGGER.debug(mapper.writeValueAsString(serde));
 
-        } catch( Exception e ) {
-            LOGGER.error("Exception: ", e);
-            Assert.fail();
-        }
+    } catch( Exception e ) {
+      LOGGER.error("Exception: ", e);
+      Assert.fail();
     }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookPostSerDeIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookPostSerDeIT.java b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookPostSerDeIT.java
index 1c9a620..3ae6749 100644
--- a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookPostSerDeIT.java
+++ b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/data/FacebookPostSerDeIT.java
@@ -40,60 +40,60 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
 /**
- * Tests serialization of Facebook Post inputs.
+ * Tests serialization of Facebook Post inputs
  */
 public class FacebookPostSerDeIT {
 
-  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookPostSerDeIT.class);
-  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+    private final static Logger LOGGER = LoggerFactory.getLogger(FacebookPostSerDeIT.class);
+    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-  @Test
-  public void Tests()
-  {
-    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
-    mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
-    mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
+    @Test
+    public void Tests()
+    {
+        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
+        mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
+        mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
 
-    InputStream is = FacebookPostSerDeIT.class.getResourceAsStream("/testpost.json");
-    Joiner joiner = Joiner.on(" ").skipNulls();
-    is = new BoundedInputStream(is, 10000);
-    String json;
+        InputStream is = FacebookPostSerDeIT.class.getResourceAsStream("/testpost.json");
+        Joiner joiner = Joiner.on(" ").skipNulls();
+        is = new BoundedInputStream(is, 10000);
+        String json;
 
-    try {
-      json = joiner.join(IOUtils.readLines(is));
-      LOGGER.debug(json);
+        try {
+            json = joiner.join(IOUtils.readLines(is));
+            LOGGER.debug(json);
 
-      Post ser = mapper.readValue(json, Post.class);
+            Post ser = mapper.readValue(json, Post.class);
 
-      String de = mapper.writeValueAsString(ser);
+            String de = mapper.writeValueAsString(ser);
 
-      LOGGER.debug(de);
+            LOGGER.debug(de);
 
-      Post serde = mapper.readValue(de, Post.class);
+            Post serde = mapper.readValue(de, Post.class);
 
-      assertEquals(ser, serde);
+            assertEquals(ser, serde);
 
-      LOGGER.debug(mapper.writeValueAsString(serde));
+            LOGGER.debug(mapper.writeValueAsString(serde));
 
-      Activity activity = new Activity();
-      FacebookActivityUtil.updateActivity(ser, activity);
+            Activity activity = new Activity();
+            FacebookActivityUtil.updateActivity(ser, activity);
 
-      assertNotNull(activity);
-      assertNotNull(activity.getActor().getId());
-      assertNotNull(activity.getActor().getDisplayName());
-      assertNotNull(activity.getId());
-      assert(activity.getVerb().equals("post"));
-      assertNotNull(activity.getObject());
-      assertNotNull(activity.getUpdated());
-      assertNotNull(activity.getPublished());
-      assertEquals(activity.getProvider().getId(), "id:providers:facebook");
-      assertEquals(activity.getProvider().getDisplayName(), "Facebook");
-      assertEquals(activity.getLinks().size(), 1);
-      assertNotNull(activity.getAdditionalProperties().get("facebook"));
+            assertNotNull(activity);
+            assertNotNull(activity.getActor().getId());
+            assertNotNull(activity.getActor().getDisplayName());
+            assertNotNull(activity.getId());
+            assert(activity.getVerb().equals("post"));
+            assertNotNull(activity.getObject());
+            assertNotNull(activity.getUpdated());
+            assertNotNull(activity.getPublished());
+            assertEquals(activity.getProvider().getId(), "id:providers:facebook");
+            assertEquals(activity.getProvider().getDisplayName(), "Facebook");
+            assertEquals(activity.getLinks().size(), 1);
+            assertNotNull(activity.getAdditionalProperties().get("facebook"));
 
-    } catch( Exception e ) {
-      LOGGER.error("Exception: ", e);
-      Assert.fail();
+        } catch( Exception e ) {
+            LOGGER.error("Exception: ", e);
+            Assert.fail();
+        }
     }
-  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/TestFacebookProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/TestFacebookProvider.java b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/TestFacebookProvider.java
index 8416186..929366d 100644
--- a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/TestFacebookProvider.java
+++ b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/TestFacebookProvider.java
@@ -22,74 +22,78 @@ import org.apache.streams.facebook.FacebookConfiguration;
 import org.apache.streams.facebook.IdConfig;
 import org.apache.streams.facebook.provider.FacebookDataCollector;
 import org.apache.streams.facebook.provider.FacebookProvider;
+
 import org.junit.Test;
 
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.BrokenBarrierException;
 import java.util.concurrent.CyclicBarrier;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  * Unit Tests For {@link org.apache.streams.facebook.provider.FacebookProvider}
  */
 public class TestFacebookProvider {
 
-    @Test
-    public void testFacebookProvider() throws Exception {
-        //Test that streams starts and shut downs.
-        final CyclicBarrier barrier = new CyclicBarrier(2);
-        FacebookProvider provider = new FacebookProvider(new FacebookConfiguration()) {
-            @Override
-            protected FacebookDataCollector getDataCollector() {
-                return new TestFacebookDataCollector(barrier, super.configuration, super.datums);
-            }
-        };
-        provider.prepare(null);
-        provider.startStream();
-        assertTrue(provider.isRunning());
-        barrier.await();
-        assertTrue(provider.isRunning());
-        assertEquals(5, provider.readCurrent().size());
-        barrier.await();
-        assertEquals(0, provider.readCurrent().size());
-        assertFalse(provider.isRunning());
-        provider.cleanUp();
-    }
+  @Test
+  public void testFacebookProvider() throws Exception {
+    //Test that streams starts and shut downs.
+    final CyclicBarrier barrier = new CyclicBarrier(2);
+    FacebookProvider provider = new FacebookProvider(new FacebookConfiguration()) {
+      @Override
+      protected FacebookDataCollector getDataCollector() {
+        return new TestFacebookDataCollector(barrier, super.configuration, super.datums);
+      }
+    };
+    provider.prepare(null);
+    provider.startStream();
+    assertTrue(provider.isRunning());
+    barrier.await();
+    assertTrue(provider.isRunning());
+    assertEquals(5, provider.readCurrent().size());
+    barrier.await();
+    assertEquals(0, provider.readCurrent().size());
+    assertFalse(provider.isRunning());
+    provider.cleanUp();
+  }
 
-    private class TestFacebookDataCollector extends FacebookDataCollector {
+  private class TestFacebookDataCollector extends FacebookDataCollector {
 
-        private CyclicBarrier barrier;
-        private BlockingQueue<StreamsDatum> queue;
+    private CyclicBarrier barrier;
+    private BlockingQueue<StreamsDatum> queue;
 
-        public TestFacebookDataCollector(CyclicBarrier barrier, FacebookConfiguration config, BlockingQueue<StreamsDatum> queue) {
-            super(config, queue);
-            this.barrier = barrier;
-            this.queue = queue;
+    public TestFacebookDataCollector(CyclicBarrier barrier, FacebookConfiguration config, BlockingQueue<StreamsDatum> queue) {
+      super(config, queue);
+      this.barrier = barrier;
+      this.queue = queue;
 
-        }
+    }
 
-        @Override
-        protected void getData(IdConfig id) throws Exception {
+    @Override
+    protected void getData(IdConfig id) throws Exception {
 
-        }
+    }
 
-        @Override
-        public void run() {
-            try {
-                for(int i=0; i < 5; ++i) {
-                    super.outputData(new Integer(i), ""+i);
-                }
-                this.barrier.await();
-                super.isComplete.set(true);
-                this.barrier.await();
-            } catch (InterruptedException e) {
-                Thread.currentThread().interrupt();
-            } catch (BrokenBarrierException bbe) {
-                fail();
-            }
+    @Override
+    public void run() {
+      try {
+        for(int i=0; i < 5; ++i) {
+          super.outputData(new Integer(i), ""+i);
         }
+        this.barrier.await();
+        super.isComplete.set(true);
+        this.barrier.await();
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt();
+      } catch (BrokenBarrierException bbe) {
+        fail();
+      }
     }
+  }
 
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/page/FacebookPageProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/page/FacebookPageProviderIT.java b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/page/FacebookPageProviderIT.java
index 43dcb1f..f78f2e5 100644
--- a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/page/FacebookPageProviderIT.java
+++ b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/page/FacebookPageProviderIT.java
@@ -18,8 +18,10 @@
 
 package org.apache.streams.facebook.test.providers.page;
 
-import com.google.common.collect.Lists;
 import org.apache.streams.facebook.provider.page.FacebookPageProvider;
+
+import com.google.common.collect.Lists;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -30,43 +32,43 @@ import java.io.LineNumberReader;
 
 public class FacebookPageProviderIT {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(FacebookPageProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookPageProviderIT.class);
 
-    @Test
-    public void testFacebookPageProvider() throws Exception {
+  @Test
+  public void testFacebookPageProvider() throws Exception {
 
-        String configfile = "./target/test-classes/FacebookPageProviderIT.conf";
-        String outfile = "./target/test-classes/FacebookPageProviderIT.stdout.txt";
+    String configfile = "./target/test-classes/FacebookPageProviderIT.conf";
+    String outfile = "./target/test-classes/FacebookPageProviderIT.stdout.txt";
 
-        String[] args = new String[2];
-        args[0] = configfile;
-        args[1] = outfile;
+    String[] args = new String[2];
+    args[0] = configfile;
+    args[1] = outfile;
 
-        Thread testThread = new Thread((Runnable) () -> {
-            try {
-                FacebookPageProvider.main(args);
-            } catch( Exception e ) {
-                LOGGER.error("Test Exception!", e);
-            }
-        });
-        testThread.start();
-        testThread.join(60000);
+    Thread testThread = new Thread((Runnable) () -> {
+      try {
+        FacebookPageProvider.main(args);
+      } catch( Exception e ) {
+        LOGGER.error("Test Exception!", e);
+      }
+    });
+    testThread.start();
+    testThread.join(60000);
 
-        FacebookPageProvider.main(Lists.newArrayList(configfile, outfile).toArray(new String[2]));
+    FacebookPageProvider.main(Lists.newArrayList(configfile, outfile).toArray(new String[2]));
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while(outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() >= 1);
+    assert (outCounter.getLineNumber() >= 1);
 
-    }
+  }
 }
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/pagefeed/FacebookPageFeedProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/pagefeed/FacebookPageFeedProviderIT.java b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/pagefeed/FacebookPageFeedProviderIT.java
index 4ddb83b..0aa4ad3 100644
--- a/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/pagefeed/FacebookPageFeedProviderIT.java
+++ b/streams-contrib/streams-provider-facebook/src/test/java/org/apache/streams/facebook/test/providers/pagefeed/FacebookPageFeedProviderIT.java
@@ -19,6 +19,7 @@
 package org.apache.streams.facebook.test.providers.pagefeed;
 
 import org.apache.streams.facebook.provider.pagefeed.FacebookPageFeedProvider;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -29,39 +30,39 @@ import java.io.LineNumberReader;
 
 public class FacebookPageFeedProviderIT {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(FacebookPageFeedProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookPageFeedProviderIT.class);
 
-    @Test
-    public void testFacebookPageFeedProvider() throws Exception {
+  @Test
+  public void testFacebookPageFeedProvider() throws Exception {
 
-        String configfile = "./target/test-classes/FacebookPageFeedProviderIT.conf";
-        String outfile = "./target/test-classes/FacebookPageFeedProviderIT.stdout.txt";
+    String configfile = "./target/test-classes/FacebookPageFeedProviderIT.conf";
+    String outfile = "./target/test-classes/FacebookPageFeedProviderIT.stdout.txt";
 
-        String[] args = new String[2];
-        args[0] = configfile;
-        args[1] = outfile;
+    String[] args = new String[2];
+    args[0] = configfile;
+    args[1] = outfile;
 
-        Thread testThread = new Thread((Runnable) () -> {
-            try {
-                FacebookPageFeedProvider.main(args);
-            } catch( Exception e ) {
-                LOGGER.error("Test Exception!", e);
-            }
-        });
-        testThread.start();
-        testThread.join(60000);
+    Thread testThread = new Thread((Runnable) () -> {
+      try {
+        FacebookPageFeedProvider.main(args);
+      } catch( Exception e ) {
+        LOGGER.error("Test Exception!", e);
+      }
+    });
+    testThread.start();
+    testThread.join(60000);
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while(outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() >= 1000);
+    assert (outCounter.getLineNumber() >= 1000);
 
-    }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailImapProviderTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailImapProviderTask.java b/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailImapProviderTask.java
index 2beeb88..a0aac9b 100644
--- a/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailImapProviderTask.java
+++ b/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailImapProviderTask.java
@@ -18,47 +18,46 @@
 
 package com.google.gmail.provider;
 
-import com.googlecode.gmail4j.GmailClient;
-import com.googlecode.gmail4j.GmailMessage;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.util.ComponentUtils;
+
+import com.googlecode.gmail4j.GmailMessage;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.Calendar;
-import java.util.GregorianCalendar;
 import java.util.List;
 
 /**
- * Created by sblackmon on 12/10/13.
+ * GMailImapProviderTask collects Gmail via IMAP driver.
  */
 public class GMailImapProviderTask implements Runnable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(GMailImapProviderTask.class);
+  private final static Logger LOGGER = LoggerFactory.getLogger(GMailImapProviderTask.class);
 
-    private GMailProvider provider;
+  private GMailProvider provider;
 
-    public GMailImapProviderTask(GMailProvider provider) {
-        this.provider = provider;
-    }
-
-    @Override
-    public void run() {
+  public GMailImapProviderTask(GMailProvider provider) {
+    this.provider = provider;
+  }
 
-        final List<GmailMessage> messages = this.provider.imapClient.getUnreadMessages();
+  @Override
+  public void run() {
 
-        for (GmailMessage message : messages) {
+    final List<GmailMessage> messages = this.provider.imapClient.getUnreadMessages();
 
-            Activity activity;
-            GMailMessageActivitySerializer serializer = new GMailMessageActivitySerializer( this.provider );
-            activity = serializer.deserialize(message);
-            StreamsDatum entry = new StreamsDatum(activity);
-            ComponentUtils.offerUntilSuccess(entry, this.provider.providerQueue);
+    for (GmailMessage message : messages) {
 
-        }
+      Activity activity;
+      GMailMessageActivitySerializer serializer = new GMailMessageActivitySerializer( this.provider );
+      activity = serializer.deserialize(message);
+      StreamsDatum entry = new StreamsDatum(activity);
+      ComponentUtils.offerUntilSuccess(entry, this.provider.providerQueue);
 
     }
 
+  }
+
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailMessageActivitySerializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailMessageActivitySerializer.java b/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailMessageActivitySerializer.java
index e2b5501..acc745d 100644
--- a/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailMessageActivitySerializer.java
+++ b/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailMessageActivitySerializer.java
@@ -18,6 +18,14 @@
 
 package com.google.gmail.provider;
 
+import org.apache.streams.data.ActivitySerializer;
+import org.apache.streams.pojo.extensions.ExtensionUtil;
+import org.apache.streams.pojo.json.Activity;
+import org.apache.streams.pojo.json.ActivityObject;
+import org.apache.streams.pojo.json.Generator;
+import org.apache.streams.pojo.json.Icon;
+import org.apache.streams.pojo.json.Provider;
+
 import com.fasterxml.jackson.annotation.JsonBackReference;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonManagedReference;
@@ -33,85 +41,83 @@ import com.googlecode.gmail4j.javamail.JavaMailGmailMessage;
 import com.sun.mail.imap.IMAPFolder;
 import com.sun.mail.imap.IMAPMessage;
 import com.sun.mail.imap.IMAPSSLStore;
+
 import org.apache.commons.lang.NotImplementedException;
-import org.apache.streams.data.ActivitySerializer;
-import org.apache.streams.pojo.extensions.ExtensionUtil;
-import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.pojo.json.ActivityObject;
-import org.apache.streams.pojo.json.Generator;
-import org.apache.streams.pojo.json.Icon;
-import org.apache.streams.pojo.json.Provider;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.mail.internet.MimeMultipart;
 import java.util.List;
 import java.util.Map;
 
-public class GMailMessageActivitySerializer implements ActivitySerializer<GmailMessage> {
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(GMailMessageActivitySerializer.class);
-
-    private GMailProvider provider;
-
-    public GMailMessageActivitySerializer(GMailProvider provider) {
-
-        this.provider = provider;
-
-        ObjectMapper mapper = new ObjectMapper();
-        mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, Boolean.FALSE);
-
-        mapper.addMixInAnnotations(IMAPSSLStore.class, MessageMixIn.class);
-        mapper.addMixInAnnotations(IMAPFolder.class, MessageMixIn.class);
-        mapper.addMixInAnnotations(IMAPMessage.class, MessageMixIn.class);
-        mapper.addMixInAnnotations(MimeMultipart.class, MessageMixIn.class);
-        mapper.addMixInAnnotations(JavaMailGmailMessage.class, MessageMixIn.class);
-
-    }
+import javax.mail.internet.MimeMultipart;
 
-    public GMailMessageActivitySerializer() {
-    }
+/**
+ * GMailMessageActivitySerializer converts a GMail message to Activity.
+ */
+public class GMailMessageActivitySerializer implements ActivitySerializer<GmailMessage> {
 
-    @Override
-    public String serializationFormat() {
-        return "gmail.v1";
+  private static final Logger LOGGER = LoggerFactory.getLogger(GMailMessageActivitySerializer.class);
+
+  private GMailProvider provider;
+
+  public GMailMessageActivitySerializer(GMailProvider provider) {
+
+    this.provider = provider;
+
+    ObjectMapper mapper = new ObjectMapper();
+    mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, Boolean.FALSE);
+
+    mapper.addMixInAnnotations(IMAPSSLStore.class, MessageMixIn.class);
+    mapper.addMixInAnnotations(IMAPFolder.class, MessageMixIn.class);
+    mapper.addMixInAnnotations(IMAPMessage.class, MessageMixIn.class);
+    mapper.addMixInAnnotations(MimeMultipart.class, MessageMixIn.class);
+    mapper.addMixInAnnotations(JavaMailGmailMessage.class, MessageMixIn.class);
+
+  }
+
+  public GMailMessageActivitySerializer() {
+  }
+
+  @Override
+  public String serializationFormat() {
+    return "gmail.v1";
+  }
+
+  @Override
+  public GmailMessage serialize(Activity activity) {
+    return null;
+  }
+
+  @Override
+  public Activity deserialize(GmailMessage gmailMessage) {
+
+    Activity activity = new Activity();
+    activity.setId(formatId(this.provider.getConfig().getUserName(), String.valueOf(gmailMessage.getMessageNumber())));
+    activity.setPublished(new DateTime(gmailMessage.getSendDate()));
+    Provider provider = new Provider();
+    provider.setId("http://gmail.com");
+    provider.setDisplayName("GMail");
+    activity.setProvider(provider);
+    ActivityObject actor = new ActivityObject();
+    actor.setId(gmailMessage.getFrom().getEmail());
+    actor.setDisplayName(gmailMessage.getFrom().getName());
+    activity.setActor(actor);
+    activity.setVerb("email");
+    ActivityObject object = new ActivityObject();
+    try {
+      object.setId(gmailMessage.getTo().get(0).getEmail());
+      object.setDisplayName(gmailMessage.getTo().get(0).getName());
+    } catch( GmailException e ) {
+      LOGGER.warn(e.getMessage());
     }
-
-    @Override
-    public GmailMessage serialize(Activity activity) {
-        return null;
+    activity.setTitle(gmailMessage.getSubject());
+    try {
+      activity.setContent(gmailMessage.getContentText());
+    } catch( GmailException e ) {
+      LOGGER.warn(e.getMessage());
     }
-
-    @Override
-    public Activity deserialize(GmailMessage gmailMessage) {
-
-        Activity activity = new Activity();
-        activity.setId(formatId(this.provider.getConfig().getUserName(), String.valueOf(gmailMessage.getMessageNumber())));
-        activity.setPublished(new DateTime(gmailMessage.getSendDate()));
-        Provider provider = new Provider();
-        provider.setId("http://gmail.com");
-        provider.setDisplayName("GMail");
-        activity.setProvider(provider);
-        ActivityObject actor = new ActivityObject();
-        actor.setId(gmailMessage.getFrom().getEmail());
-        actor.setDisplayName(gmailMessage.getFrom().getName());
-        activity.setActor(actor);
-        activity.setVerb("email");
-        ActivityObject object = new ActivityObject();
-        try {
-            object.setId(gmailMessage.getTo().get(0).getEmail());
-            object.setDisplayName(gmailMessage.getTo().get(0).getName());
-        } catch( GmailException e ) {
-            LOGGER.warn(e.getMessage());
-        }
-        activity.setTitle(gmailMessage.getSubject());
-        try {
-            activity.setContent(gmailMessage.getContentText());
-        } catch( GmailException e ) {
-            LOGGER.warn(e.getMessage());
-        }
-        activity.setObject(object);
+    activity.setObject(object);
 
 //        try {
 //            // if jackson can't serialize the object, find out now
@@ -126,72 +132,72 @@ public class GMailMessageActivitySerializer implements ActivitySerializer<GmailM
 //            e.printStackTrace();
 //        }
 
-        return activity;
-    }
-
-    @Override
-    public List<Activity> deserializeAll(List<GmailMessage> serializedList) {
-        throw new NotImplementedException("Not currently implemented");
-    }
-
-    public Activity convert(ObjectNode event) {
-        return null;
-    }
-
-    public static Generator buildGenerator(ObjectNode event) {
-        return null;
-    }
-
-    public static Icon getIcon(ObjectNode event) {
-        return null;
-    }
-
-    public static Provider buildProvider(ObjectNode event) {
-        Provider provider = new Provider();
-        provider.setId("id:providers:gmail");
-        return provider;
-    }
-
-    public static List<Object> getLinks(ObjectNode event) {
-        return null;
-    }
-
-    public static String getUrls(ObjectNode event) {
-        return null;
-    }
-
-    public static void addGMailExtension(Activity activity, GmailMessage gmailMessage) {
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
-        extensions.put("gmail", gmailMessage);
-    }
-
-    public static String formatId(String... idparts) {
-        return Joiner.on(":").join(Lists.asList("id:gmail", idparts));
-    }
-
-    interface MessageMixIn {
-        @JsonManagedReference
-        @JsonIgnore
-        IMAPSSLStore getDefaultFolder(); // we don't need it!
-        @JsonManagedReference
-        @JsonIgnore
-        IMAPSSLStore getPersonalNamespaces(); // we don't need it!
-        @JsonManagedReference
-        @JsonIgnore
-        IMAPFolder getStore(); // we don't need it!
-        //        @JsonManagedReference
+    return activity;
+  }
+
+  @Override
+  public List<Activity> deserializeAll(List<GmailMessage> serializedList) {
+    throw new NotImplementedException("Not currently implemented");
+  }
+
+  public Activity convert(ObjectNode event) {
+    return null;
+  }
+
+  public static Generator buildGenerator(ObjectNode event) {
+    return null;
+  }
+
+  public static Icon getIcon(ObjectNode event) {
+    return null;
+  }
+
+  public static Provider buildProvider(ObjectNode event) {
+    Provider provider = new Provider();
+    provider.setId("id:providers:gmail");
+    return provider;
+  }
+
+  public static List<Object> getLinks(ObjectNode event) {
+    return null;
+  }
+
+  public static String getUrls(ObjectNode event) {
+    return null;
+  }
+
+  public static void addGMailExtension(Activity activity, GmailMessage gmailMessage) {
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
+    extensions.put("gmail", gmailMessage);
+  }
+
+  public static String formatId(String... idparts) {
+    return Joiner.on(":").join(Lists.asList("id:gmail", idparts));
+  }
+
+  interface MessageMixIn {
+    @JsonManagedReference
+    @JsonIgnore
+    IMAPSSLStore getDefaultFolder(); // we don't need it!
+    @JsonManagedReference
+    @JsonIgnore
+    IMAPSSLStore getPersonalNamespaces(); // we don't need it!
+    @JsonManagedReference
+    @JsonIgnore
+    IMAPFolder getStore(); // we don't need it!
+    //        @JsonManagedReference
 //        @JsonIgnore
 //        @JsonBackReference
-        //IMAPFolder getParent(); // we don't need it!
-        @JsonManagedReference
-        @JsonIgnore
-        @JsonBackReference
-        IMAPMessage getFolder(); // we don't need it!
-        @JsonManagedReference
-        @JsonIgnore
-        @JsonProperty("parent")
-        @JsonBackReference
-        MimeMultipart getParent();
-    }
+    //IMAPFolder getParent(); // we don't need it!
+    @JsonManagedReference
+    @JsonIgnore
+    @JsonBackReference
+    IMAPMessage getFolder(); // we don't need it!
+    @JsonManagedReference
+    @JsonIgnore
+    @JsonProperty("parent")
+    @JsonBackReference
+    MimeMultipart getParent();
+  }
 
 }


[25/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverJsonActivitySerializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverJsonActivitySerializer.java b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverJsonActivitySerializer.java
index 17fde37..91d487e 100644
--- a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverJsonActivitySerializer.java
+++ b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverJsonActivitySerializer.java
@@ -18,6 +18,9 @@
 
 package org.apache.streams.moreover;
 
+import org.apache.streams.data.ActivitySerializer;
+import org.apache.streams.pojo.json.Activity;
+
 import com.fasterxml.jackson.databind.AnnotationIntrospector;
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -25,9 +28,6 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.fasterxml.jackson.module.jaxb.JaxbAnnotationIntrospector;
 import com.moreover.api.Article;
 import org.apache.commons.lang.NotImplementedException;
-import org.apache.streams.data.ActivitySerializer;
-import org.apache.streams.moreover.MoreoverUtils;
-import org.apache.streams.pojo.json.Activity;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -35,64 +35,64 @@ import java.io.IOException;
 import java.util.List;
 
 /**
- * Deserializes Moreover JSON format into Activities
+ * Deserializes Moreover JSON format into Activities.
  */
 public class MoreoverJsonActivitySerializer implements ActivitySerializer<String> {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(MoreoverJsonActivitySerializer.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(MoreoverJsonActivitySerializer.class);
 
-    public MoreoverJsonActivitySerializer() {
-    }
+  public MoreoverJsonActivitySerializer() {
+  }
 
-    @Override
-    public String serializationFormat() {
-        return "application/json+vnd.moreover.com.v1";
-    }
+  @Override
+  public String serializationFormat() {
+    return "application/json+vnd.moreover.com.v1";
+  }
 
-    @Override
-    public String serialize(Activity deserialized) {
-        throw new UnsupportedOperationException("Cannot currently serialize to Moreover JSON");
-    }
+  @Override
+  public String serialize(Activity deserialized) {
+    throw new UnsupportedOperationException("Cannot currently serialize to Moreover JSON");
+  }
 
-    @Override
-    public Activity deserialize(String serialized) {
-        serialized = serialized.replaceAll("\\[[ ]*\\]", "null");
-
-        LOGGER.debug(serialized);
-
-        ObjectMapper mapper = new ObjectMapper();
-        AnnotationIntrospector introspector = new JaxbAnnotationIntrospector(mapper.getTypeFactory());
-        mapper.setAnnotationIntrospector(introspector);
-        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.FALSE);
-        mapper.configure(DeserializationFeature.FAIL_ON_INVALID_SUBTYPE, Boolean.FALSE);
-        mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.WRAP_EXCEPTIONS, Boolean.TRUE);
-
-        Article article;
-        try {
-            ObjectNode node = (ObjectNode)mapper.readTree(serialized);
-            node.remove("tags");
-            node.remove("locations");
-            node.remove("companies");
-            node.remove("topics");
-            node.remove("media");
-            node.remove("outboundUrls");
-            ObjectNode jsonNodes = (ObjectNode) node.get("source").get("feed");
-            jsonNodes.remove("editorialTopics");
-            jsonNodes.remove("tags");
-            jsonNodes.remove("autoTopics");
-            article = mapper.convertValue(node, Article.class);
-        } catch (IOException e) {
-            throw new IllegalArgumentException("Unable to deserialize", e);
-        }
-        return MoreoverUtils.convert(article);
-    }
+  @Override
+  public Activity deserialize(String serialized) {
+    serialized = serialized.replaceAll("\\[[ ]*\\]", "null");
 
-    @Override
-    public List<Activity> deserializeAll(List<String> serializedList) {
-        throw new NotImplementedException("Not currently implemented");
+    LOGGER.debug(serialized);
+
+    ObjectMapper mapper = new ObjectMapper();
+    AnnotationIntrospector introspector = new JaxbAnnotationIntrospector(mapper.getTypeFactory());
+    mapper.setAnnotationIntrospector(introspector);
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.FALSE);
+    mapper.configure(DeserializationFeature.FAIL_ON_INVALID_SUBTYPE, Boolean.FALSE);
+    mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.WRAP_EXCEPTIONS, Boolean.TRUE);
+
+    Article article;
+    try {
+      ObjectNode node = (ObjectNode)mapper.readTree(serialized);
+      node.remove("tags");
+      node.remove("locations");
+      node.remove("companies");
+      node.remove("topics");
+      node.remove("media");
+      node.remove("outboundUrls");
+      ObjectNode jsonNodes = (ObjectNode) node.get("source").get("feed");
+      jsonNodes.remove("editorialTopics");
+      jsonNodes.remove("tags");
+      jsonNodes.remove("autoTopics");
+      article = mapper.convertValue(node, Article.class);
+    } catch (IOException ex) {
+      throw new IllegalArgumentException("Unable to deserialize", ex);
     }
+    return MoreoverUtils.convert(article);
+  }
+
+  @Override
+  public List<Activity> deserializeAll(List<String> serializedList) {
+    throw new NotImplementedException("Not currently implemented");
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverProvider.java b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverProvider.java
index 78d8e9d..2ab6ee4 100644
--- a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverProvider.java
+++ b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverProvider.java
@@ -18,14 +18,6 @@
 
 package org.apache.streams.moreover;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.*;
-import com.google.common.util.concurrent.Uninterruptibles;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigParseOptions;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.config.StreamsConfigurator;
@@ -33,6 +25,17 @@ import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProvider;
 import org.apache.streams.core.StreamsResultSet;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Iterators;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Queues;
+import com.google.common.util.concurrent.Uninterruptibles;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -42,136 +45,155 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.PrintStream;
 import java.math.BigInteger;
-import java.util.*;
-import java.util.concurrent.*;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Queue;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
 
 /**
- * Streams Provider for the Moreover Metabase API
- *
- *  To use from command line:
- *
- *  Supply configuration similar to src/test/resources/rss.conf
- *
- *  Launch using:
- *
- *  mvn exec:java -Dexec.mainClass=org.apache.streams.moreover.MoreoverProvider -Dexec.args="rss.conf articles.json"
+ * Streams Provider for the Moreover Metabase API.
  */
 public class MoreoverProvider implements StreamsProvider {
 
-    public final static String STREAMS_ID = "MoreoverProvider";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(MoreoverProvider.class);
-
-    protected volatile Queue<StreamsDatum> providerQueue = new ConcurrentLinkedQueue<StreamsDatum>();
+  public static final String STREAMS_ID = "MoreoverProvider";
 
-    private List<MoreoverKeyData> keys;
+  private static final Logger LOGGER = LoggerFactory.getLogger(MoreoverProvider.class);
 
-    private MoreoverConfiguration config;
+  protected volatile Queue<StreamsDatum> providerQueue = new ConcurrentLinkedQueue<StreamsDatum>();
 
-    private ExecutorService executor;
+  private List<MoreoverKeyData> keys;
 
-    public MoreoverProvider(MoreoverConfiguration moreoverConfiguration) {
-        this.config = moreoverConfiguration;
-        this.keys = Lists.newArrayList();
-        for( MoreoverKeyData apiKey : config.getApiKeys()) {
-            this.keys.add(apiKey);
-        }
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    public void startStream() {
+  private MoreoverConfiguration config;
 
-        for(MoreoverKeyData key : keys) {
-            MoreoverProviderTask task = new MoreoverProviderTask(key.getId(), key.getKey(), this.providerQueue, key.getStartingSequence());
-            executor.submit(new Thread(task));
-            LOGGER.info("Started producer for {}", key.getKey());
-        }
+  private ExecutorService executor;
 
+  /**
+   * MoreoverProvider constructor.
+   * @param moreoverConfiguration MoreoverConfiguration
+   */
+  public MoreoverProvider(MoreoverConfiguration moreoverConfiguration) {
+    this.config = moreoverConfiguration;
+    this.keys = Lists.newArrayList();
+    for ( MoreoverKeyData apiKey : config.getApiKeys()) {
+      this.keys.add(apiKey);
     }
+  }
 
-    @Override
-    public synchronized StreamsResultSet readCurrent() {
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        LOGGER.debug("readCurrent: {}", providerQueue.size());
-
-        Collection<StreamsDatum> currentIterator = Lists.newArrayList();
-        Iterators.addAll(currentIterator, providerQueue.iterator());
-
-        StreamsResultSet current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(currentIterator));
-
-        providerQueue.clear();
-
-        return current;
-    }
+  @Override
+  public void startStream() {
 
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return null;
+    for (MoreoverKeyData key : keys) {
+      MoreoverProviderTask task = new MoreoverProviderTask(key.getId(), key.getKey(), this.providerQueue, key.getStartingSequence());
+      executor.submit(new Thread(task));
+      LOGGER.info("Started producer for {}", key.getKey());
     }
 
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return null;
-    }
-
-    @Override
-    public boolean isRunning() {
-        return !executor.isShutdown() && !executor.isTerminated();
-    }
-
-    @Override
-    public void prepare(Object configurationObject) {
-        LOGGER.debug("Prepare");
-        executor = Executors.newSingleThreadExecutor();
-    }
-
-    @Override
-    public void cleanUp() {
-
-    }
-
-    public static void main(String[] args) throws Exception {
-
-        Preconditions.checkArgument(args.length >= 2);
-
-        String configfile = args[0];
-        String outfile = args[1];
-
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config testResourceConfig = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
-
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        MoreoverConfiguration config = new ComponentConfigurator<>(MoreoverConfiguration.class).detectConfiguration(typesafe, "rss");
-        MoreoverProvider provider = new MoreoverProvider(config);
-
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
-            while(iterator.hasNext()) {
-                StreamsDatum datum = iterator.next();
-                String json;
-                try {
-                    json = mapper.writeValueAsString(datum.getDocument());
-                    outStream.println(json);
-                } catch (JsonProcessingException e) {
-                    System.err.println(e.getMessage());
-                }
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
+  }
+
+  @Override
+  public synchronized StreamsResultSet readCurrent() {
+
+    LOGGER.debug("readCurrent: {}", providerQueue.size());
+
+    Collection<StreamsDatum> currentIterator = Lists.newArrayList();
+    Iterators.addAll(currentIterator, providerQueue.iterator());
+
+    StreamsResultSet current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(currentIterator));
+
+    providerQueue.clear();
+
+    return current;
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return null;
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return null;
+  }
+
+  @Override
+  public boolean isRunning() {
+    return !executor.isShutdown() && !executor.isTerminated();
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    LOGGER.debug("Prepare");
+    executor = Executors.newSingleThreadExecutor();
+  }
+
+  @Override
+  public void cleanUp() {
+
+  }
+
+  /**
+   * To use from command line:
+   *
+   * <p/>
+   * Supply configuration similar to src/test/resources/rss.conf
+   *
+   * <p/>
+   * Launch using:
+   *
+   * <p/>
+   * mvn exec:java -Dexec.mainClass=org.apache.streams.moreover.MoreoverProvider -Dexec.args="rss.conf articles.json"
+   *
+   * @param args args
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    MoreoverConfiguration config = new ComponentConfigurator<>(MoreoverConfiguration.class).detectConfiguration(typesafe, "rss");
+    MoreoverProvider provider = new MoreoverProvider(config);
+
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
+      while (iterator.hasNext()) {
+        StreamsDatum datum = iterator.next();
+        String json;
+        try {
+          json = mapper.writeValueAsString(datum.getDocument());
+          outStream.println(json);
+        } catch (JsonProcessingException ex) {
+          System.err.println(ex.getMessage());
+        }
+      }
     }
+    while ( provider.isRunning() );
+    provider.cleanUp();
+    outStream.flush();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverProviderTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverProviderTask.java b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverProviderTask.java
index ad92d73..88aec81 100644
--- a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverProviderTask.java
+++ b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverProviderTask.java
@@ -18,74 +18,84 @@
 
 package org.apache.streams.moreover;
 
-import com.google.common.collect.ImmutableSet;
 import org.apache.streams.core.StreamsDatum;
+
+import com.google.common.collect.ImmutableSet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Queue;
 
 /**
- * Task to pull from the Morever API
+ * Task that pulls from the Moreover API on behalf of MoreoverProvider.
  */
 public class MoreoverProviderTask implements Runnable {
 
-    public static final int LATENCY = 10;
-    public static final int REQUIRED_LATENCY = LATENCY * 1000;
-    private static Logger logger = LoggerFactory.getLogger(MoreoverProviderTask.class);
-
-    private String lastSequence;
-    private final String apiKey;
-    private final String apiId;
-    private final Queue<StreamsDatum> results;
-    private final MoreoverClient moClient;
-    private boolean started = false;
+  public static final int LATENCY = 10;
+  public static final int REQUIRED_LATENCY = LATENCY * 1000;
+  private static Logger logger = LoggerFactory.getLogger(MoreoverProviderTask.class);
 
-    public MoreoverProviderTask(String apiId, String apiKey, Queue<StreamsDatum> results, String lastSequence) {
-        //logger.info("Constructed new task {} for {} {} {}", UUID.randomUUID().toString(), apiId, apiKey, lastSequence);
-        this.apiId = apiId;
-        this.apiKey = apiKey;
-        this.results = results;
-        this.lastSequence = lastSequence;
-        this.moClient = new MoreoverClient(this.apiId, this.apiKey, this.lastSequence);
-        initializeClient(moClient);
-    }
+  private String lastSequence;
+  private final String apiKey;
+  private final String apiId;
+  private final Queue<StreamsDatum> results;
+  private final MoreoverClient moClient;
+  private boolean started = false;
 
-    @Override
-    public void run() {
-        while(true) {
-            try {
-                ensureTime(moClient);
-                MoreoverResult result = moClient.getArticlesAfter(lastSequence, 500);
-                started = true;
-                lastSequence = result.process().toString();
-                for(StreamsDatum entry : ImmutableSet.copyOf(result.iterator()))
-                    results.offer(entry);
-                logger.info("ApiKey={}\tlastSequenceid={}", this.apiKey, lastSequence);
+  /**
+   * MoreoverProviderTask constructor.
+   * @param apiId Moreover API id used to authenticate requests
+   * @param apiKey Moreover API key used to authenticate requests
+   * @param results queue to which fetched StreamsDatum results are offered
+   * @param lastSequence sequence id after which articles will be pulled
+   */
+  public MoreoverProviderTask(String apiId, String apiKey, Queue<StreamsDatum> results, String lastSequence) {
+    //logger.info("Constructed new task {} for {} {} {}", UUID.randomUUID().toString(), apiId, apiKey, lastSequence);
+    this.apiId = apiId;
+    this.apiKey = apiKey;
+    this.results = results;
+    this.lastSequence = lastSequence;
+    this.moClient = new MoreoverClient(this.apiId, this.apiKey, this.lastSequence);
+    initializeClient(moClient);
+  }
 
-            } catch (Exception e) {
-                logger.error("Exception while polling moreover", e);
-            }
+  @Override
+  public void run() {
+    while (true) {
+      try {
+        ensureTime(moClient);
+        MoreoverResult result = moClient.getArticlesAfter(lastSequence, 500);
+        started = true;
+        lastSequence = result.process().toString();
+        for (StreamsDatum entry : ImmutableSet.copyOf(result.iterator())) {
+          results.offer(entry);
         }
+        logger.info("ApiKey={}\tlastSequenceid={}", this.apiKey, lastSequence);
+
+      } catch (Exception ex) {
+        logger.error("Exception while polling moreover", ex);
+      }
     }
+  }
 
-    private void ensureTime(MoreoverClient moClient) {
-        try {
-            long gap = System.currentTimeMillis() - moClient.pullTime;
-            if (gap < REQUIRED_LATENCY)
-                Thread.sleep(REQUIRED_LATENCY - gap);
-        } catch (Exception e) {
-            logger.warn("Error sleeping for latency");
-        }
+  private void ensureTime(MoreoverClient moClient) {
+    try {
+      long gap = System.currentTimeMillis() - moClient.pullTime;
+      if (gap < REQUIRED_LATENCY) {
+        Thread.sleep(REQUIRED_LATENCY - gap);
+      }
+    } catch (Exception ex) {
+      logger.warn("Error sleeping for latency");
     }
+  }
 
-    private void initializeClient(MoreoverClient moClient) {
-        try {
-            moClient.getArticlesAfter(this.lastSequence, 2);
-        } catch (Exception e) {
-            logger.error("Failed to start stream, {}", this.apiKey);
-            logger.error("Exception : ", e);
-            throw new IllegalStateException("Unable to initialize stream", e);
-        }
+  private void initializeClient(MoreoverClient moClient) {
+    try {
+      moClient.getArticlesAfter(this.lastSequence, 2);
+    } catch (Exception ex) {
+      logger.error("Failed to start stream, {}", this.apiKey);
+      logger.error("Exception : ", ex);
+      throw new IllegalStateException("Unable to initialize stream", ex);
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverResult.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverResult.java b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverResult.java
index e07084f..589e647 100644
--- a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverResult.java
+++ b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverResult.java
@@ -18,6 +18,8 @@
 
 package org.apache.streams.moreover;
 
+import org.apache.streams.core.StreamsDatum;
+
 import com.fasterxml.aalto.stax.InputFactoryImpl;
 import com.fasterxml.aalto.stax.OutputFactoryImpl;
 import com.fasterxml.jackson.databind.DeserializationFeature;
@@ -29,7 +31,6 @@ import com.fasterxml.jackson.dataformat.xml.XmlMapper;
 import com.google.common.collect.Lists;
 import com.moreover.api.Article;
 import com.moreover.api.ArticlesResponse;
-import org.apache.streams.core.StreamsDatum;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -38,163 +39,163 @@ import java.math.BigInteger;
 import java.util.Iterator;
 import java.util.List;
 
-
 public class MoreoverResult implements Iterable<StreamsDatum> {
 
-    private static final Logger logger = LoggerFactory.getLogger(MoreoverResult.class);
-
-    private ObjectMapper mapper;
-    private XmlMapper xmlMapper;
-
-    private String xmlString;
-    private String jsonString;
-    private ArticlesResponse resultObject;
-    private ArticlesResponse.Articles articles;
-    private List<Article> articleArray;
-    private long start;
-    private long end;
-    private String clientId;
-    private BigInteger maxSequencedId = BigInteger.ZERO;
-
-    protected ArticlesResponse response;
-    protected List<StreamsDatum> list = Lists.newArrayList();
-
-    protected MoreoverResult(String clientId, String xmlString, long start, long end) {
-        this.xmlString = xmlString;
-        this.clientId = clientId;
-        this.start = start;
-        this.end = end;
-        XmlFactory f = new XmlFactory(new InputFactoryImpl(),
-                new OutputFactoryImpl());
-
-        JacksonXmlModule module = new JacksonXmlModule();
-
-        module.setDefaultUseWrapper(false);
-
-        xmlMapper = new XmlMapper(f, module);
-
-        xmlMapper
-                .configure(
-                        DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY,
-                        Boolean.TRUE);
-        xmlMapper
-                .configure(
-                        DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT,
-                        Boolean.TRUE);
-        xmlMapper
-                .configure(
-                        DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY,
-                        Boolean.TRUE);
-        xmlMapper.configure(
-                DeserializationFeature.READ_ENUMS_USING_TO_STRING,
-                Boolean.TRUE);
-        xmlMapper.configure(
-                DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,
-                Boolean.FALSE);
-
-        mapper = new ObjectMapper();
-
-        mapper
-                .configure(
-                        DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY,
-                        Boolean.TRUE);
-        mapper.configure(
-                DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT,
-                Boolean.TRUE);
-        mapper
-                .configure(
-                        DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY,
-                        Boolean.TRUE);
-        mapper.configure(
-                DeserializationFeature.READ_ENUMS_USING_TO_STRING,
-                Boolean.TRUE);
-
+  private static final Logger logger = LoggerFactory.getLogger(MoreoverResult.class);
+
+  private ObjectMapper mapper;
+  private XmlMapper xmlMapper;
+
+  private String xmlString;
+  private String jsonString;
+  private ArticlesResponse resultObject;
+  private ArticlesResponse.Articles articles;
+  private List<Article> articleArray;
+  private long start;
+  private long end;
+  private String clientId;
+  private BigInteger maxSequencedId = BigInteger.ZERO;
+
+  protected ArticlesResponse response;
+  protected List<StreamsDatum> list = Lists.newArrayList();
+
+  protected MoreoverResult(String clientId, String xmlString, long start, long end) {
+    this.xmlString = xmlString;
+    this.clientId = clientId;
+    this.start = start;
+    this.end = end;
+    XmlFactory xmlFactory = new XmlFactory(new InputFactoryImpl(),
+        new OutputFactoryImpl());
+
+    JacksonXmlModule module = new JacksonXmlModule();
+
+    module.setDefaultUseWrapper(false);
+
+    xmlMapper = new XmlMapper(xmlFactory, module);
+
+    xmlMapper
+        .configure(
+            DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY,
+            Boolean.TRUE);
+    xmlMapper
+        .configure(
+            DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT,
+            Boolean.TRUE);
+    xmlMapper
+        .configure(
+            DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY,
+            Boolean.TRUE);
+    xmlMapper.configure(
+        DeserializationFeature.READ_ENUMS_USING_TO_STRING,
+        Boolean.TRUE);
+    xmlMapper.configure(
+        DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,
+        Boolean.FALSE);
+
+    mapper = new ObjectMapper();
+
+    mapper
+        .configure(
+            DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY,
+            Boolean.TRUE);
+    mapper.configure(
+        DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT,
+        Boolean.TRUE);
+    mapper
+        .configure(
+            DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY,
+            Boolean.TRUE);
+    mapper.configure(
+        DeserializationFeature.READ_ENUMS_USING_TO_STRING,
+        Boolean.TRUE);
+
+  }
+
+  public String getClientId() {
+    return clientId;
+  }
+
+  public long getStart() {
+    return start;
+  }
+
+  public long getEnd() {
+    return end;
+  }
+
+  /**
+   * Process the batch of articles in the response.
+   * @return max sequenceId.
+   */
+  public BigInteger process() {
+
+    try {
+      this.resultObject = xmlMapper.readValue(xmlString, ArticlesResponse.class);
+    } catch (JsonMappingException ex) {
+      // theory is this may not be fatal
+      this.resultObject = (ArticlesResponse) ex.getPath().get(0).getFrom();
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      logger.warn("Unable to process document:");
+      logger.warn(xmlString);
     }
 
-    public String getClientId() {
-        return clientId;
+    if ( this.resultObject.getStatus().equals("FAILURE")) {
+      logger.warn(this.resultObject.getStatus());
+      logger.warn(this.resultObject.getMessageCode());
+      logger.warn(this.resultObject.getUserMessage());
+      logger.warn(this.resultObject.getDeveloperMessage());
+    } else {
+      this.articles = resultObject.getArticles();
+      this.articleArray = articles.getArticle();
+
+      for (Article article : articleArray) {
+        BigInteger sequenceid = new BigInteger(article.getSequenceId());
+        list.add(new StreamsDatum(article, sequenceid));
+        logger.trace("Prior max sequence Id {} current candidate {}", this.maxSequencedId, sequenceid);
+        if (sequenceid.compareTo(this.maxSequencedId) > 0) {
+          this.maxSequencedId = sequenceid;
+        }
+      }
     }
 
-    public long getStart() {
-        return start;
-    }
+    return this.maxSequencedId;
+  }
 
-    public long getEnd() {
-        return end;
-    }
+  public String getXmlString() {
+    return this.xmlString;
+  }
 
-    public BigInteger process() {
-
-        try {
-            this.resultObject = xmlMapper.readValue(xmlString, ArticlesResponse.class);
-        } catch (JsonMappingException e) {
-            // theory is this may not be fatal
-            this.resultObject = (ArticlesResponse) e.getPath().get(0).getFrom();
-        } catch (Exception e) {
-            e.printStackTrace();
-            logger.warn("Unable to process document:");
-            logger.warn(xmlString);
-        }
+  public BigInteger getMaxSequencedId() {
+    return this.maxSequencedId;
+  }
 
-        if( this.resultObject.getStatus().equals("FAILURE"))
-        {
-            logger.warn(this.resultObject.getStatus());
-            logger.warn(this.resultObject.getMessageCode());
-            logger.warn(this.resultObject.getUserMessage());
-            logger.warn(this.resultObject.getDeveloperMessage());
-        }
-        else
-        {
-            this.articles = resultObject.getArticles();
-            this.articleArray = articles.getArticle();
-
-            for (Article article : articleArray) {
-                BigInteger sequenceid = new BigInteger(article.getSequenceId());
-                list.add(new StreamsDatum(article, sequenceid));
-                logger.trace("Prior max sequence Id {} current candidate {}", this.maxSequencedId, sequenceid);
-                if (sequenceid.compareTo(this.maxSequencedId) > 0) {
-                    this.maxSequencedId = sequenceid;
-                }
-            }
-        }
+  @Override
+  public Iterator<StreamsDatum> iterator() {
+    return list.iterator();
+  }
 
-        return this.maxSequencedId;
-    }
+  protected static class JsonStringIterator implements Iterator<Serializable> {
 
-    public String getXmlString() {
-        return this.xmlString;
-    }
+    private Iterator<Serializable> underlying;
 
-    public BigInteger getMaxSequencedId() {
-        return this.maxSequencedId;
+    protected JsonStringIterator(Iterator<Serializable> underlying) {
+      this.underlying = underlying;
     }
 
     @Override
-    public Iterator<StreamsDatum> iterator() {
-        return list.iterator();
+    public boolean hasNext() {
+      return underlying.hasNext();
     }
 
-    protected static class JsonStringIterator implements Iterator<Serializable> {
-
-        private Iterator<Serializable> underlying;
-
-        protected JsonStringIterator(Iterator<Serializable> underlying) {
-            this.underlying = underlying;
-        }
-
-        @Override
-        public boolean hasNext() {
-            return underlying.hasNext();
-        }
-
-        @Override
-        public String next() {
-            return underlying.next().toString();
-        }
+    @Override
+    public String next() {
+      return underlying.next().toString();
+    }
 
-        @Override
-        public void remove() {
-            underlying.remove();
-        }
+    @Override
+    public void remove() {
+      underlying.remove();
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverResultSetWrapper.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverResultSetWrapper.java b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverResultSetWrapper.java
deleted file mode 100644
index 0a47bd1..0000000
--- a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverResultSetWrapper.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.moreover;
-
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsResultSet;
-
-import java.util.Queue;
-
-public class MoreoverResultSetWrapper extends StreamsResultSet {
-
-    public MoreoverResultSetWrapper(MoreoverResult underlying) {
-        super((Queue<StreamsDatum>)underlying);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverUtils.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverUtils.java b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverUtils.java
index 8a91281..f9a3595 100644
--- a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverUtils.java
+++ b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverUtils.java
@@ -18,22 +18,19 @@
 
 package org.apache.streams.moreover;
 
-import com.moreover.api.Article;
-import com.moreover.api.Author;
-import com.moreover.api.AuthorPublishingPlatform;
-import com.moreover.api.Feed;
-import com.moreover.api.Source;
 import org.apache.streams.data.util.ActivityUtil;
 import org.apache.streams.pojo.extensions.ExtensionUtil;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Provider;
+
+import com.moreover.api.Article;
+import com.moreover.api.Author;
+import com.moreover.api.AuthorPublishingPlatform;
+import com.moreover.api.Feed;
+import com.moreover.api.Source;
 import org.joda.time.DateTime;
 
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
@@ -43,125 +40,156 @@ import static org.apache.streams.data.util.ActivityUtil.LANGUAGE_EXTENSION;
 import static org.apache.streams.data.util.ActivityUtil.LOCATION_EXTENSION;
 import static org.apache.streams.data.util.ActivityUtil.LOCATION_EXTENSION_COUNTRY;
 import static org.apache.streams.data.util.ActivityUtil.getObjectId;
-import static org.apache.streams.data.util.ActivityUtil.getProviderId;
 
 /**
- * Provides utilities for Moroever data
+ * Provides utilities for Moreover data.
  */
 public class MoreoverUtils {
-    private MoreoverUtils() {
-    }
-
-    public static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss'Z'";
-
-    public static Activity convert(Article article) {
-        Activity activity = new Activity();
-        Source source = article.getSource();
-        activity.setActor(convert(article.getAuthor(), source.getName()));
-        activity.setProvider(convert(source));
-        activity.setTarget(convertTarget(source));
-        activity.setObject(convertObject(article));
-        activity.setPublished(DateTime.parse(article.getPublishedDate()));
-        activity.setContent(article.getContent());
-        activity.setTitle(article.getTitle());
-        activity.setVerb("posted");
-        fixActivityId(activity);
-        addLocationExtension(activity, source);
-        addLanguageExtension(activity, article);
-        activity.setLinks(convertLinks(article));
-        return activity;
-    }
 
-    private static void fixActivityId(Activity activity) {
-        if (activity.getId() != null && activity.getId().matches("\\{[a-z]*\\}")) {
-            activity.setId(null);
-        }
+  private MoreoverUtils() {
+  }
+
+  public static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss'Z'";
+
+  /**
+   * Converts a Moreover Article into an Activity.
+   * @param article article
+   * @return Activity
+   */
+  public static Activity convert(Article article) {
+    Activity activity = new Activity();
+    Source source = article.getSource();
+    activity.setActor(convert(article.getAuthor(), source.getName()));
+    activity.setProvider(convert(source));
+    activity.setTarget(convertTarget(source));
+    activity.setObject(convertObject(article));
+    activity.setPublished(DateTime.parse(article.getPublishedDate()));
+    activity.setContent(article.getContent());
+    activity.setTitle(article.getTitle());
+    activity.setVerb("posted");
+    fixActivityId(activity);
+    addLocationExtension(activity, source);
+    addLanguageExtension(activity, article);
+    activity.setLinks(convertLinks(article));
+    return activity;
+  }
+
+  /**
+   * Converts a Moreover Source into a Provider.
+   * @param source Source
+   * @return Provider
+   */
+  public static Provider convert(Source source) {
+    Provider provider = new Provider();
+    Feed feed = source.getFeed();
+    String display = getProviderId(feed);
+    provider.setId(ActivityUtil.getProviderId(display.trim().toLowerCase().replace(" ", "_")));
+    provider.setDisplayName(display);
+    provider.setUrl(feed.getUrl());
+    return provider;
+  }
+
+  /**
+   * Converts an Author and platform name into an actor ActivityObject.
+   * @param author Author
+   * @param platformName platformName
+   * @return $.actor
+   */
+  public static ActivityObject convert(Author author, String platformName) {
+    ActivityObject actor = new ActivityObject();
+    AuthorPublishingPlatform platform = author.getPublishingPlatform();
+    String userId = platform.getUserId();
+    if (userId != null) {
+      actor.setId(ActivityUtil.getPersonId(getProviderId(platformName), userId));
     }
-
-    private static List convertLinks(Article article) {
-        List<String> list = new LinkedList<>();
-        Article.OutboundUrls outboundUrls = article.getOutboundUrls();
-        if (outboundUrls != null) {
-            for (String url : outboundUrls.getOutboundUrl()) {
-                list.add(url);
-            }
-        }
-        return list;
-    }
-
-    public static ActivityObject convertTarget(Source source) {
-        ActivityObject object = new ActivityObject();
-        object.setUrl(source.getHomeUrl());
-        object.setDisplayName(source.getName());
-        return object;
-    }
-
-    public static ActivityObject convertObject(Article article) {
-        ActivityObject object = new ActivityObject();
-        object.setContent(article.getContent());
-        object.setSummary(article.getTitle());
-        object.setUrl(article.getOriginalUrl());
-        object.setObjectType(article.getDataFormat());
-        String type = article.getDataFormat().equals("text") ? "article" : article.getDataFormat();
-        object.setId(getObjectId(getProviderID(article.getSource().getFeed()), type, article.getId()));
-        object.setPublished(DateTime.parse(article.getPublishedDate()));
-        return object;
-    }
-
-    public static Provider convert(Source source) {
-        Provider provider = new Provider();
-        Feed feed = source.getFeed();
-        String display = getProviderID(feed);
-        provider.setId(getProviderId(display.trim().toLowerCase().replace(" ", "_")));
-        provider.setDisplayName(display);
-        provider.setUrl(feed.getUrl());
-        return provider;
+    actor.setDisplayName(author.getName());
+    actor.setUrl(author.getHomeUrl());
+    actor.setSummary(author.getDescription());
+    actor.setAdditionalProperty("email", author.getEmail());
+    return actor;
+  }
+
+  private static void fixActivityId(Activity activity) {
+    if (activity.getId() != null && activity.getId().matches("\\{[a-z]*\\}")) {
+      activity.setId(null);
     }
-
-    public static ActivityObject convert(Author author, String platformName) {
-        ActivityObject actor = new ActivityObject();
-        AuthorPublishingPlatform platform = author.getPublishingPlatform();
-        String userId = platform.getUserId();
-        if (userId != null) actor.setId(ActivityUtil.getPersonId(getProviderID(platformName), userId));
-        actor.setDisplayName(author.getName());
-        actor.setUrl(author.getHomeUrl());
-        actor.setSummary(author.getDescription());
-        actor.setAdditionalProperty("email", author.getEmail());
-        return actor;
+  }
+
+  private static List convertLinks(Article article) {
+    List<String> list = new LinkedList<>();
+    Article.OutboundUrls outboundUrls = article.getOutboundUrls();
+    if (outboundUrls != null) {
+      for (String url : outboundUrls.getOutboundUrl()) {
+        list.add(url);
+      }
     }
-
-    public static void addLocationExtension(Activity activity, Source value) {
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
-        String country = value.getLocation().getCountryCode() == null ? value.getLocation().getCountry() : value.getLocation().getCountryCode();
-        if (country != null) {
-            Map<String, Object> location = new HashMap<>();
-            location.put(LOCATION_EXTENSION_COUNTRY, country);
-            extensions.put(LOCATION_EXTENSION, location);
-        }
+    return list;
+  }
+
+  /**
+   * Converts a Source into the target ActivityObject.
+   * @param source source
+   * @return ActivityObject $.target
+   */
+  public static ActivityObject convertTarget(Source source) {
+    ActivityObject object = new ActivityObject();
+    object.setUrl(source.getHomeUrl());
+    object.setDisplayName(source.getName());
+    return object;
+  }
+
+  /**
+   * Converts an Article into the object ActivityObject.
+   * @param article article
+   * @return ActivityObject $.object
+   */
+  public static ActivityObject convertObject(Article article) {
+    ActivityObject object = new ActivityObject();
+    object.setContent(article.getContent());
+    object.setSummary(article.getTitle());
+    object.setUrl(article.getOriginalUrl());
+    object.setObjectType(article.getDataFormat());
+    String type = article.getDataFormat().equals("text") ? "article" : article.getDataFormat();
+    object.setId(getObjectId(getProviderId(article.getSource().getFeed()), type, article.getId()));
+    object.setPublished(DateTime.parse(article.getPublishedDate()));
+    return object;
+  }
+
+  /**
+   * Adds a location extension (country) to the activity when the source provides one.
+   * @param activity Activity
+   * @param source Source
+   */
+  public static void addLocationExtension(Activity activity, Source source) {
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
+    String country = source.getLocation().getCountryCode() == null
+        ? source.getLocation().getCountry()
+        : source.getLocation().getCountryCode();
+    if (country != null) {
+      Map<String, Object> location = new HashMap<>();
+      location.put(LOCATION_EXTENSION_COUNTRY, country);
+      extensions.put(LOCATION_EXTENSION, location);
     }
-
-    public static void addLanguageExtension(Activity activity, Article value) {
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
-        String language = value.getLanguage();
-        if (language != null) {
-            extensions.put(LANGUAGE_EXTENSION, language);
-        }
+  }
+
+  /**
+   * Adds a language extension to the activity when the article declares a language.
+   * @param activity Activity
+   * @param article Article
+   */
+  public static void addLanguageExtension(Activity activity, Article article) {
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
+    String language = article.getLanguage();
+    if (language != null) {
+      extensions.put(LANGUAGE_EXTENSION, language);
     }
+  }
 
-    public static Date parse(String str) {
-        DateFormat fmt = new SimpleDateFormat(DATE_FORMAT);
-        try {
-            return fmt.parse(str);
-        } catch (ParseException e) {
-            throw new IllegalArgumentException("Invalid date format", e);
-        }
-    }
+  private static String getProviderId(Feed feed) {
+    return getProviderId(feed.getPublishingPlatform() == null ? feed.getMediaType() : feed.getPublishingPlatform());
+  }
 
-    private static String getProviderID(Feed feed) {
-        return getProviderID(feed.getPublishingPlatform() == null ? feed.getMediaType() : feed.getPublishingPlatform());
-    }
-
-    private static String getProviderID(String feed) {
-        return feed.toLowerCase().replace(" ", "_").trim();
-    }
+  private static String getProviderId(String feed) {
+    return feed.toLowerCase().replace(" ", "_").trim();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverXmlActivitySerializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverXmlActivitySerializer.java b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverXmlActivitySerializer.java
index 4b7b3b0..fe4378c 100644
--- a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverXmlActivitySerializer.java
+++ b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverXmlActivitySerializer.java
@@ -18,89 +18,89 @@
 
 package org.apache.streams.moreover;
 
+import org.apache.streams.data.ActivitySerializer;
+import org.apache.streams.pojo.json.Activity;
+
 import com.moreover.api.Article;
 import com.moreover.api.ArticlesResponse;
 import com.moreover.api.ObjectFactory;
 import org.apache.commons.lang.SerializationException;
-import org.apache.streams.data.ActivitySerializer;
-import org.apache.streams.moreover.MoreoverUtils;
-import org.apache.streams.pojo.json.Activity;
 
+import java.io.StringReader;
+import java.util.LinkedList;
+import java.util.List;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBElement;
 import javax.xml.bind.JAXBException;
 import javax.xml.bind.Unmarshaller;
-import java.io.StringReader;
-import java.util.LinkedList;
-import java.util.List;
 
 /**
- * Deserializes the Moreover Article XML and converts it to an instance of {@link Activity}
+ * Deserializes the Moreover Article XML and converts it to an instance of {@link Activity}.
  */
 public class MoreoverXmlActivitySerializer implements ActivitySerializer<String> {
 
-    //JAXBContext is threadsafe (supposedly)
-    private final JAXBContext articleContext;
-    private final JAXBContext articlesContext;
+  //JAXBContext is threadsafe (supposedly)
+  private final JAXBContext articleContext;
+  private final JAXBContext articlesContext;
 
-    public MoreoverXmlActivitySerializer() {
-        articleContext = createContext(Article.class);
-        articlesContext = createContext(ArticlesResponse.class);
-    }
+  public MoreoverXmlActivitySerializer() {
+    articleContext = createContext(Article.class);
+    articlesContext = createContext(ArticlesResponse.class);
+  }
 
-    @Override
-    public String serializationFormat() {
-        return "application/xml+vnd.moreover.com.v1";
-    }
+  @Override
+  public String serializationFormat() {
+    return "application/xml+vnd.moreover.com.v1";
+  }
 
-    @Override
-    public String serialize(Activity deserialized) {
-        throw new UnsupportedOperationException("Cannot currently serialize to Moreover");
-    }
+  @Override
+  public String serialize(Activity deserialized) {
+    throw new UnsupportedOperationException("Cannot currently serialize to Moreover");
+  }
 
-    @Override
-    public Activity deserialize(String serialized) {
-        Article article = deserializeMoreover(serialized);
-        return MoreoverUtils.convert(article);
-    }
+  @Override
+  public Activity deserialize(String serialized) {
+    Article article = deserializeMoreover(serialized);
+    return MoreoverUtils.convert(article);
+  }
 
-    @Override
-    public List<Activity> deserializeAll(List<String> serializedList) {
-        List<Activity> activities = new LinkedList<Activity>();
-        for(String item : serializedList) {
-            ArticlesResponse response = deserializeMoreoverResponse(item);
-            for(Article article : response.getArticles().getArticle()) {
-                activities.add(MoreoverUtils.convert(article));
-            }
-        }
-        return activities;
+  @Override
+  public List<Activity> deserializeAll(List<String> serializedList) {
+    List<Activity> activities = new LinkedList<Activity>();
+    for (String item : serializedList) {
+      ArticlesResponse response = deserializeMoreoverResponse(item);
+      for (Article article : response.getArticles().getArticle()) {
+        activities.add(MoreoverUtils.convert(article));
+      }
     }
+    return activities;
+  }
 
-    private Article deserializeMoreover(String serialized){
-        try {
-            Unmarshaller unmarshaller = articleContext.createUnmarshaller();
-            return (Article) unmarshaller.unmarshal(new StringReader(serialized));
-        } catch (JAXBException e) {
-            throw new SerializationException("Unable to deserialize Moreover data", e);
-        }
+  private Article deserializeMoreover(String serialized) {
+    try {
+      Unmarshaller unmarshaller = articleContext.createUnmarshaller();
+      return (Article) unmarshaller.unmarshal(new StringReader(serialized));
+    } catch (JAXBException ex) {
+      throw new SerializationException("Unable to deserialize Moreover data", ex);
     }
+  }
 
-    private ArticlesResponse deserializeMoreoverResponse(String serialized){
-        try {
-            Unmarshaller unmarshaller = articlesContext.createUnmarshaller();
-            return ((JAXBElement<ArticlesResponse>) unmarshaller.unmarshal(new StringReader(serialized))).getValue();
-        } catch (JAXBException e) {
-            throw new SerializationException("Unable to deserialize Moreover data", e);
-        }
+  private ArticlesResponse deserializeMoreoverResponse(String serialized) {
+    try {
+      Unmarshaller unmarshaller = articlesContext.createUnmarshaller();
+      return ((JAXBElement<ArticlesResponse>) unmarshaller.unmarshal(new StringReader(serialized))).getValue();
+    } catch (JAXBException ex) {
+      throw new SerializationException("Unable to deserialize Moreover data", ex);
     }
+  }
 
-    private JAXBContext createContext(Class articleClass) {
-        JAXBContext context;
-        try {
-            context = JAXBContext.newInstance(articleClass.getPackage().getName(), ObjectFactory.class.getClassLoader());
-        } catch (JAXBException e) {
-            throw new IllegalStateException("Unable to create JAXB Context for Moreover data", e);
-        }
-        return context;
+  private JAXBContext createContext(Class articleClass) {
+    JAXBContext context;
+    try {
+      context = JAXBContext.newInstance(articleClass.getPackage().getName(), ObjectFactory.class.getClassLoader());
+    } catch (JAXBException ex) {
+      throw new IllegalStateException("Unable to create JAXB Context for Moreover data", ex);
     }
+    return context;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/MoreoverTestUtil.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/MoreoverTestUtil.java b/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/MoreoverTestUtil.java
index cdd5822..c9bd823 100644
--- a/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/MoreoverTestUtil.java
+++ b/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/MoreoverTestUtil.java
@@ -19,25 +19,35 @@
 package org.apache.streams.moreover;
 
 import org.apache.streams.pojo.json.Activity;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import static java.util.regex.Pattern.matches;
-import static org.hamcrest.CoreMatchers.*;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.CoreMatchers.nullValue;
 import static org.junit.Assert.assertThat;
 
+/**
+ * Test utilities for validating Activities produced by the Moreover serializers.
+ */
 public class MoreoverTestUtil {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(MoreoverTestUtil.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(MoreoverTestUtil.class);
 
-    public static void test(Activity activity) {
-        assertThat(activity, is(not(nullValue())));
-        assertThat(activity.getActor(), is(not(nullValue())));
-        assertThat(activity.getObject(), is(not(nullValue())));
-        if(activity.getObject().getId() != null) {
-            assertThat(matches("id:.*:[a-z]*s:[a-zA-Z0-9]*", activity.getObject().getId()), is(true));
-        }
-        assertThat(activity.getObject().getObjectType(), is(not(nullValue())));
-        LOGGER.debug(activity.getPublished().toString());
+  /**
+   * Asserts that the converted Activity has the core fields expected of Moreover data.
+   * @param activity the activity to validate
+   */
+  public static void validate(Activity activity) {
+    assertThat(activity, is(not(nullValue())));
+    assertThat(activity.getActor(), is(not(nullValue())));
+    assertThat(activity.getObject(), is(not(nullValue())));
+    if (activity.getObject().getId() != null) {
+      assertThat(matches("id:.*:[a-z]*s:[a-zA-Z0-9]*", activity.getObject().getId()), is(true));
     }
+    assertThat(activity.getObject().getObjectType(), is(not(nullValue())));
+    LOGGER.debug(activity.getPublished().toString());
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/MoreoverJsonActivitySerializerIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/MoreoverJsonActivitySerializerIT.java b/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/MoreoverJsonActivitySerializerIT.java
index 94ef097..b7ae076 100644
--- a/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/MoreoverJsonActivitySerializerIT.java
+++ b/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/MoreoverJsonActivitySerializerIT.java
@@ -18,13 +18,15 @@
 
 package org.apache.streams.moreover.test;
 
+import org.apache.streams.data.ActivitySerializer;
+import org.apache.streams.moreover.MoreoverJsonActivitySerializer;
+import org.apache.streams.moreover.MoreoverTestUtil;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.commons.io.IOUtils;
-import org.apache.streams.data.ActivitySerializer;
-import org.apache.streams.moreover.MoreoverJsonActivitySerializer;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -32,45 +34,48 @@ import java.io.InputStream;
 import java.io.StringWriter;
 import java.nio.charset.Charset;
 
-import static org.apache.streams.moreover.MoreoverTestUtil.test;
-
 /**
- * Tests ability to serialize moreover json Strings
+ * Tests ability to serialize moreover json Strings.
  */
 public class MoreoverJsonActivitySerializerIT {
-    JsonNode json;
-    ActivitySerializer serializer = new MoreoverJsonActivitySerializer();
-    ObjectMapper mapper;
 
-    @Before
-    public void setup() throws Exception {
+  JsonNode json;
+  ActivitySerializer serializer = new MoreoverJsonActivitySerializer();
+  ObjectMapper mapper;
 
-        StringWriter writer = new StringWriter();
-        InputStream resourceAsStream = this.getClass().getResourceAsStream("/moreover.json");
-        IOUtils.copy(resourceAsStream, writer, Charset.forName("UTF-8"));
+  /**
+   * Before.
+   * @throws Exception Exception
+   */
+  @Before
+  public void setup() throws Exception {
 
-        mapper = new ObjectMapper();
-        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.FALSE);
-        mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
+    StringWriter writer = new StringWriter();
+    InputStream resourceAsStream = this.getClass().getResourceAsStream("/moreover.json");
+    IOUtils.copy(resourceAsStream, writer, Charset.forName("UTF-8"));
 
-        json = mapper.readValue(writer.toString(), JsonNode.class);
-    }
+    mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.FALSE);
+    mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
+
+    json = mapper.readValue(writer.toString(), JsonNode.class);
+  }
 
-    @Test
-    public void loadData() throws Exception {
-        for (JsonNode item : json) {
-            test(serializer.deserialize(getString(item)));
-        }
+  @Test
+  public void loadData() throws Exception {
+    for (JsonNode item : json) {
+      MoreoverTestUtil.validate(serializer.deserialize(getString(item)));
     }
+  }
 
 
-    private String getString(JsonNode jsonNode)  {
-        try {
-            return new ObjectMapper().writeValueAsString(jsonNode);
-        } catch (JsonProcessingException e) {
-            throw new RuntimeException(e);
-        }
+  private String getString(JsonNode jsonNode)  {
+    try {
+      return new ObjectMapper().writeValueAsString(jsonNode);
+    } catch (JsonProcessingException ex) {
+      throw new RuntimeException(ex);
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/MoreoverXmlActivitySerializerIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/MoreoverXmlActivitySerializerIT.java b/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/MoreoverXmlActivitySerializerIT.java
index ad0b384..2d93656 100644
--- a/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/MoreoverXmlActivitySerializerIT.java
+++ b/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/MoreoverXmlActivitySerializerIT.java
@@ -18,10 +18,13 @@
 
 package org.apache.streams.moreover.test;
 
-import com.google.common.collect.Lists;
-import org.apache.commons.io.IOUtils;
 import org.apache.streams.data.ActivitySerializer;
+import org.apache.streams.moreover.MoreoverTestUtil;
+import org.apache.streams.moreover.MoreoverXmlActivitySerializer;
 import org.apache.streams.pojo.json.Activity;
+
+import com.google.common.collect.Lists;
+import org.apache.commons.io.IOUtils;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -31,36 +34,32 @@ import java.io.StringWriter;
 import java.nio.charset.Charset;
 import java.util.List;
 
-import org.apache.streams.moreover.MoreoverXmlActivitySerializer;
-
-import static org.apache.streams.moreover.MoreoverTestUtil.test;
-
 /**
  * Tests ability to serialize moreover xml Strings
  */
 public class MoreoverXmlActivitySerializerIT {
-    ActivitySerializer serializer;
-    private String xml;
+  ActivitySerializer serializer;
+  private String xml;
 
-    @Before
-    public void setup() throws IOException {
-        serializer = new MoreoverXmlActivitySerializer();
-        xml = loadXml();
-    }
+  @Before
+  public void setup() throws IOException {
+    serializer = new MoreoverXmlActivitySerializer();
+    xml = loadXml();
+  }
 
-    @Test
-    public void loadData() throws Exception {
-        List<Activity> activities = serializer.deserializeAll(Lists.newArrayList(xml));
-        for (Activity activity : activities) {
-            test(activity);
-        }
+  @Test
+  public void loadData() throws Exception {
+    List<Activity> activities = serializer.deserializeAll(Lists.newArrayList(xml));
+    for (Activity activity : activities) {
+      MoreoverTestUtil.validate(activity);
     }
+  }
 
-    private String loadXml() throws IOException {
-        StringWriter writer = new StringWriter();
-        InputStream resourceAsStream = this.getClass().getResourceAsStream("/moreover.xml");
-        IOUtils.copy(resourceAsStream, writer, Charset.forName("UTF-8"));
-        return writer.toString();
-    }
+  private String loadXml() throws IOException {
+    StringWriter writer = new StringWriter();
+    InputStream resourceAsStream = this.getClass().getResourceAsStream("/moreover.xml");
+    IOUtils.copy(resourceAsStream, writer, Charset.forName("UTF-8"));
+    return writer.toString();
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/provider/MoreoverProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/provider/MoreoverProviderIT.java b/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/provider/MoreoverProviderIT.java
index 2bc672d..f5b61bf 100644
--- a/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/provider/MoreoverProviderIT.java
+++ b/streams-contrib/streams-provider-moreover/src/test/java/org/apache/streams/moreover/test/provider/MoreoverProviderIT.java
@@ -18,10 +18,11 @@
 
 package org.apache.streams.moreover.test.provider;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.moreover.MoreoverProvider;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.Lists;
-import org.apache.streams.moreover.MoreoverProvider;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -32,36 +33,34 @@ import java.io.FileReader;
 import java.io.LineNumberReader;
 
 /**
- * Integration test for MoreoverProviderIT
- *
- * Created by sblackmon on 10/21/16.
+ * Integration test for MoreoverProviderIT.
  */
 @Ignore("this is ignored because the project doesn't have credentials to test it with during CI")
 public class MoreoverProviderIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(MoreoverProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(MoreoverProviderIT.class);
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Test
-    public void testRssStreamProvider() throws Exception {
+  @Test
+  public void testRssStreamProvider() throws Exception {
 
-        String configfile = "./target/test-classes/RssStreamProviderIT.conf";
-        String outfile = "./target/test-classes/RssStreamProviderIT.stdout.txt";
+    String configfile = "./target/test-classes/RssStreamProviderIT.conf";
+    String outfile = "./target/test-classes/RssStreamProviderIT.stdout.txt";
 
-        MoreoverProvider.main(Lists.newArrayList(configfile, outfile).toArray(new String[2]));
+    MoreoverProvider.main(Lists.newArrayList(configfile, outfile).toArray(new String[2]));
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while (outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() >= 1);
+    assert (outCounter.getLineNumber() >= 1);
 
-    }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/processor/RssTypeConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/processor/RssTypeConverter.java b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/processor/RssTypeConverter.java
index 1df1ff9..d3c763a 100644
--- a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/processor/RssTypeConverter.java
+++ b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/processor/RssTypeConverter.java
@@ -18,13 +18,14 @@
 
 package org.apache.streams.rss.processor;
 
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.rss.serializer.SyndEntryActivitySerializer;
+
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.collect.Lists;
+import org.apache.commons.lang.NotImplementedException;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.slf4j.Logger;
@@ -35,45 +36,45 @@ import java.util.List;
 /**
  * Converts ObjectNode representations of Rome SyndEntries to activities.
  */
-public class RssTypeConverter implements StreamsProcessor{
-
-    public final static String STREAMS_ID = "RssTypeConverter";
+public class RssTypeConverter implements StreamsProcessor {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(RssTypeConverter.class);
+  public static final String STREAMS_ID = "RssTypeConverter";
 
-    private SyndEntryActivitySerializer serializer;
-    private int successCount = 0;
-    private int failCount = 0;
+  private static final Logger LOGGER = LoggerFactory.getLogger(RssTypeConverter.class);
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  private SyndEntryActivitySerializer serializer;
+  private int successCount = 0;
+  private int failCount = 0;
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum datum) {
-        List<StreamsDatum> datums = Lists.newLinkedList();
-        if(datum.getDocument() instanceof ObjectNode) {
-            Activity activity = this.serializer.deserialize((ObjectNode) datum.getDocument());
-            datums.add(new StreamsDatum(activity, activity.getId(), DateTime.now().withZone(DateTimeZone.UTC)));
-            successCount ++;
-        } else {
-            failCount ++;
-            throw new NotImplementedException("Not implemented for class type : "+ datum.getDocument().getClass().toString());
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        }
-        LOGGER.debug("Processor current success count: {} and current fail: {}", successCount, failCount);
+  @Override
+  public List<StreamsDatum> process(StreamsDatum datum) {
+    List<StreamsDatum> datums = Lists.newLinkedList();
+    if (datum.getDocument() instanceof ObjectNode) {
+      Activity activity = this.serializer.deserialize((ObjectNode) datum.getDocument());
+      datums.add(new StreamsDatum(activity, activity.getId(), DateTime.now().withZone(DateTimeZone.UTC)));
+      successCount++;
+    } else {
+      failCount++;
+      throw new NotImplementedException("Not implemented for class type : " + datum.getDocument().getClass().toString());
 
-        return datums;
     }
+    LOGGER.debug("Processor current success count: {} and current fail: {}", successCount, failCount);
 
-    @Override
-    public void prepare(Object o) {
-        this.serializer = new SyndEntryActivitySerializer();
-    }
+    return datums;
+  }
 
-    @Override
-    public void cleanUp() {
+  @Override
+  public void prepare(Object configurationObject) {
+    this.serializer = new SyndEntryActivitySerializer();
+  }
 
-    }
+  @Override
+  public void cleanUp() {
+
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssEventClassifier.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssEventClassifier.java b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssEventClassifier.java
deleted file mode 100644
index 4e6efee..0000000
--- a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssEventClassifier.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.rss.provider;
-
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.sun.syndication.feed.synd.SyndEntry;
-
-/**
- * Created by sblackmon on 12/13/13.
- */
-public class RssEventClassifier {
-
-    public static Class detectClass( ObjectNode bean ) {
-        return SyndEntry.class;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssEventProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssEventProcessor.java b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssEventProcessor.java
index 75d275d..078356c 100644
--- a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssEventProcessor.java
+++ b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssEventProcessor.java
@@ -18,86 +18,100 @@
 
 package org.apache.streams.rss.provider;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.sun.syndication.feed.synd.SyndEntry;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.rss.serializer.SyndEntryActivitySerializer;
 import org.apache.streams.rss.serializer.SyndEntrySerializer;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.sun.syndication.feed.synd.SyndEntry;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Queue;
 import java.util.Random;
 
+/**
+ * RssEventProcessor processes Rss Events.
+ */
 public class RssEventProcessor implements Runnable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(RssEventProcessor.class);
-
-    private ObjectMapper mapper = new ObjectMapper();
-
-    private Queue<SyndEntry> inQueue;
-    private Queue<StreamsDatum> outQueue;
-
-    private Class inClass;
-    private Class outClass;
-
-    private SyndEntryActivitySerializer syndEntryActivitySerializer = new SyndEntryActivitySerializer();
-    private SyndEntrySerializer syndEntrySerializer = new SyndEntrySerializer();
-
-    public final static String TERMINATE = new String("TERMINATE");
-
-    public RssEventProcessor(Queue<SyndEntry> inQueue, Queue<StreamsDatum> outQueue, Class inClass, Class outClass) {
-        this.inQueue = inQueue;
-        this.outQueue = outQueue;
-        this.inClass = inClass;
-        this.outClass = outClass;
-    }
-
-    public RssEventProcessor(Queue<SyndEntry> inQueue, Queue<StreamsDatum> outQueue, Class outClass) {
-        this.inQueue = inQueue;
-        this.outQueue = outQueue;
-        this.outClass = outClass;
-    }
+  private static final Logger LOGGER = LoggerFactory.getLogger(RssEventProcessor.class);
+
+  private ObjectMapper mapper = new ObjectMapper();
+
+  private Queue<SyndEntry> inQueue;
+  private Queue<StreamsDatum> outQueue;
+
+  private Class inClass;
+  private Class outClass;
+
+  private SyndEntryActivitySerializer syndEntryActivitySerializer = new SyndEntryActivitySerializer();
+  private SyndEntrySerializer syndEntrySerializer = new SyndEntrySerializer();
+
+  public static final String TERMINATE = new String("TERMINATE");
+
+  /**
+   * RssEventProcessor constructor.
+   * @param inQueue inQueue
+   * @param outQueue outQueue
+   * @param inClass inClass
+   * @param outClass outClass
+   */
+  public RssEventProcessor(Queue<SyndEntry> inQueue, Queue<StreamsDatum> outQueue, Class inClass, Class outClass) {
+    this.inQueue = inQueue;
+    this.outQueue = outQueue;
+    this.inClass = inClass;
+    this.outClass = outClass;
+  }
+
+  /**
+   * RssEventProcessor constructor.
+   * @param inQueue inQueue
+   * @param outQueue outQueue
+   * @param outClass outClass
+   */
+  public RssEventProcessor(Queue<SyndEntry> inQueue, Queue<StreamsDatum> outQueue, Class outClass) {
+    this.inQueue = inQueue;
+    this.outQueue = outQueue;
+    this.outClass = outClass;
+  }
+
+  @Override
+  public void run() {
+
+    while (true) {
+      Object item;
+      try {
+        item = inQueue.poll();
+        if (item instanceof String && item.equals(TERMINATE)) {
+          LOGGER.info("Terminating!");
+          break;
+        }
 
-    @Override
-    public void run() {
-
-        while(true) {
-            Object item;
-            try {
-                item = inQueue.poll();
-                if(item instanceof String && item.equals(TERMINATE)) {
-                    LOGGER.info("Terminating!");
-                    break;
-                }
-
-                Thread.sleep(new Random().nextInt(100));
-
-                // if the target is string, just pass-through
-                if( String.class.equals(outClass))
-                    outQueue.offer(new StreamsDatum(item.toString()));
-                else if( SyndEntry.class.equals(outClass))
-                {
-                    outQueue.offer(new StreamsDatum(item));
-                }
-                else if( Activity.class.equals(outClass))
-                {
-                    // convert to desired format
-                    SyndEntry entry = (SyndEntry)item;
-                    if( entry != null ) {
-                        Activity out = syndEntryActivitySerializer.deserialize(this.syndEntrySerializer.deserialize((SyndEntry)item));
-
-                        if( out != null )
-                            outQueue.offer(new StreamsDatum(out));
-                    }
-                }
-
-            } catch (Exception e) {
-                e.printStackTrace();
+        Thread.sleep(new Random().nextInt(100));
+
+        // if the target is string, just pass-through
+        if (String.class.equals(outClass)) {
+          outQueue.offer(new StreamsDatum(item.toString()));
+        } else if (SyndEntry.class.equals(outClass)) {
+          outQueue.offer(new StreamsDatum(item));
+        } else if (Activity.class.equals(outClass)) {
+          // convert to desired format
+          SyndEntry entry = (SyndEntry)item;
+          if (entry != null) {
+            Activity out = syndEntryActivitySerializer.deserialize(this.syndEntrySerializer.deserialize((SyndEntry)item));
+
+            if (out != null) {
+              outQueue.offer(new StreamsDatum(out));
             }
+          }
         }
+
+      } catch (Exception ex) {
+        ex.printStackTrace();
+      }
     }
+  }
 
-};
+}


[38/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchMetadataUtil.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchMetadataUtil.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchMetadataUtil.java
index 100b0c5..8fbbf3c 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchMetadataUtil.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchMetadataUtil.java
@@ -18,128 +18,197 @@
 
 package org.apache.streams.elasticsearch;
 
-import com.fasterxml.jackson.databind.JsonNode;
 import org.apache.streams.core.StreamsDatum;
 
+import com.fasterxml.jackson.databind.JsonNode;
+
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 
+/**
+ * Utility class for handling Elasticsearch Metadata maps.
+ */
 public class ElasticsearchMetadataUtil {
 
-    public static String getIndex(Map<String, Object> metadata, ElasticsearchWriterConfiguration config) {
+  /**
+   * get Index to use based on supplied parameters.
+   *
+   * @param metadata metadata
+   * @param config config
+   * @return result
+   */
+  public static String getIndex(Map<String, Object> metadata, ElasticsearchWriterConfiguration config) {
 
-        String index = null;
+    String index = null;
 
-        if( metadata != null && metadata.containsKey("index"))
-            index = (String) metadata.get("index");
-
-        if(index == null || (config.getForceUseConfig() != null && config.getForceUseConfig())) {
-            index = config.getIndex();
-        }
-
-        return index;
+    if ( metadata != null && metadata.containsKey("index")) {
+      index = (String) metadata.get("index");
     }
 
-    public static String getType(Map<String, Object> metadata, ElasticsearchWriterConfiguration config) {
-
-        String type = null;
+    if ( index == null || (config.getForceUseConfig() != null && config.getForceUseConfig())) {
+      index = config.getIndex();
+    }
 
-        if( metadata != null && metadata.containsKey("type"))
-            type = (String) metadata.get("type");
+    return index;
+  }
 
-        if(type == null || (config.getForceUseConfig() != null && config.getForceUseConfig())) {
-            type = config.getType();
-        }
+  /**
+   * get Index to use based on supplied parameters.
+   *
+   * @param metadata metadata
+   * @param config config
+   * @return result
+   */
+  public static String getIndex(Map<String, Object> metadata, ElasticsearchReaderConfiguration config) {
 
+    String index = null;
 
-        return type;
+    if ( metadata != null && metadata.containsKey("index")) {
+      index = (String) metadata.get("index");
     }
 
-    public static String getIndex(Map<String, Object> metadata, ElasticsearchReaderConfiguration config) {
+    if ( index == null ) {
+      index = config.getIndexes().get(0);
+    }
 
-        String index = null;
+    return index;
+  }
 
-        if( metadata != null && metadata.containsKey("index"))
-            index = (String) metadata.get("index");
+  /**
+   * get Type to use based on supplied parameters.
+   *
+   * @param metadata metadata
+   * @param config config
+   * @return result
+   */
+  public static String getType(Map<String, Object> metadata, ElasticsearchWriterConfiguration config) {
 
-        if(index == null) {
-            index = config.getIndexes().get(0);
-        }
+    String type = null;
 
-        return index;
+    if ( metadata != null && metadata.containsKey("type")) {
+      type = (String) metadata.get("type");
     }
 
-    public static String getType(Map<String, Object> metadata, ElasticsearchReaderConfiguration config) {
+    if (type == null || (config.getForceUseConfig() != null && config.getForceUseConfig())) {
+      type = config.getType();
+    }
 
-        String type = null;
+    return type;
+  }
 
-        if( metadata != null && metadata.containsKey("type"))
-            type = (String) metadata.get("type");
+  /**
+   * get Type to use based on supplied parameters.
+   *
+   * @param metadata metadata
+   * @param config config
+   * @return result
+   */
+  public static String getType(Map<String, Object> metadata, ElasticsearchReaderConfiguration config) {
 
-        if(type == null) {
-            type = config.getTypes().get(0);
-        }
+    String type = null;
 
+    if ( metadata != null && metadata.containsKey("type")) {
+      type = (String) metadata.get("type");
+    }
 
-        return type;
+    if (type == null) {
+      type = config.getTypes().get(0);
     }
 
-    public static String getId(StreamsDatum datum) {
 
-        String id = datum.getId();
+    return type;
+  }
+
+  /**
+   * get id to use based on supplied parameters.
+   *
+   * @param datum datum
+   * @return result
+   */
+  public static String getId(StreamsDatum datum) {
 
-        Map<String, Object> metadata = datum.getMetadata();
+    String id = datum.getId();
 
-        if( id == null && metadata != null && metadata.containsKey("id"))
-            id = (String) datum.getMetadata().get("id");
+    Map<String, Object> metadata = datum.getMetadata();
 
-        return id;
+    if ( id == null && metadata != null && metadata.containsKey("id")) {
+      id = (String) datum.getMetadata().get("id");
     }
 
-    static String getParent(StreamsDatum datum) {
+    return id;
+  }
 
-        String parent = null;
+  /**
+   * get id to use based on supplied parameters.
+   *
+   * @param metadata metadata
+   * @return result
+   */
+  public static String getId(Map<String, Object> metadata) {
 
-        Map<String, Object> metadata = datum.getMetadata();
+    return (String) metadata.get("id");
 
-        if(metadata != null && metadata.containsKey("parent"))
-            parent = (String) datum.getMetadata().get("parent");
+  }
 
-        return parent;
-    }
+  /**
+   * get parent id to use based on supplied parameters.
+   *
+   * @param datum datum
+   * @return result
+   */
+  static String getParent(StreamsDatum datum) {
 
-    static String getRouting(StreamsDatum datum) {
+    String parent = null;
 
-        String routing = null;
+    Map<String, Object> metadata = datum.getMetadata();
 
-        Map<String, Object> metadata = datum.getMetadata();
+    if (metadata != null && metadata.containsKey("parent")) {
+      parent = (String) datum.getMetadata().get("parent");
+    }
 
-        if(metadata != null && metadata.containsKey("routing"))
-            routing = (String) datum.getMetadata().get("routing");
+    return parent;
+  }
 
-        return routing;
-    }
+  /**
+   * get routing id to use based on supplied parameters.
+   *
+   * @param datum datum
+   * @return result
+   */
+  static String getRouting(StreamsDatum datum) {
 
-    public static String getId(Map<String, Object> metadata) {
+    String routing = null;
 
-        return (String) metadata.get("id");
+    Map<String, Object> metadata = datum.getMetadata();
 
+    if (metadata != null && metadata.containsKey("routing")) {
+      routing = (String) datum.getMetadata().get("routing");
     }
 
-    public static Map<String, Object> asMap(JsonNode node) {
+    return routing;
+  }
 
-        Iterator<Map.Entry<String, JsonNode>> iterator = node.fields();
-        Map<String, Object> ret = new HashMap<>();
+  /**
+   * get JsonNode as Map.
+   * @param node node
+   * @return result
+   */
+  // TODO: move this to a utility package
+  public static Map<String, Object> asMap(JsonNode node) {
 
-        Map.Entry<String, JsonNode> entry;
+    Iterator<Map.Entry<String, JsonNode>> iterator = node.fields();
+    Map<String, Object> ret = new HashMap<>();
 
-        while (iterator.hasNext()) {
-            entry = iterator.next();
-            if( entry.getValue().asText() != null )
-                ret.put(entry.getKey(), entry.getValue().asText());
-        }
+    Map.Entry<String, JsonNode> entry;
 
-        return ret;
+    while (iterator.hasNext()) {
+      entry = iterator.next();
+      if ( entry.getValue().asText() != null ) {
+        ret.put(entry.getKey(), entry.getValue().asText());
+      }
     }
+
+    return ret;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistDeleter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistDeleter.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistDeleter.java
index af754ad..789b62f 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistDeleter.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistDeleter.java
@@ -18,90 +18,106 @@
 
 package org.apache.streams.elasticsearch;
 
-import com.google.common.base.Preconditions;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistWriter;
+
+import com.google.common.base.Preconditions;
+
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Map;
 
+/**
+ * ElasticsearchPersistDeleter deletes documents from elasticsearch.
+ */
 public class ElasticsearchPersistDeleter extends ElasticsearchPersistWriter implements StreamsPersistWriter {
 
-    public static final String STREAMS_ID = ElasticsearchPersistDeleter.class.getCanonicalName();
+  public static final String STREAMS_ID = ElasticsearchPersistDeleter.class.getCanonicalName();
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistDeleter.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistDeleter.class);
 
-    public ElasticsearchPersistDeleter() {
-        super();
-    }
+  public ElasticsearchPersistDeleter() {
+    super();
+  }
 
-    public ElasticsearchPersistDeleter(ElasticsearchWriterConfiguration config) {
-        super(config);
-    }
+  public ElasticsearchPersistDeleter(ElasticsearchWriterConfiguration config) {
+    super(config);
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public void write(StreamsDatum streamsDatum) {
+  @Override
+  public void write(StreamsDatum streamsDatum) {
 
-        if(streamsDatum == null || streamsDatum.getDocument() == null)
-            return;
+    if ( streamsDatum == null || streamsDatum.getDocument() == null) {
+      return;
+    }
 
-        LOGGER.debug("Delete Document: {}", streamsDatum.getDocument());
+    LOGGER.debug("Delete Document: {}", streamsDatum.getDocument());
 
-        Map<String, Object> metadata = streamsDatum.getMetadata();
+    Map<String, Object> metadata = streamsDatum.getMetadata();
 
-        LOGGER.debug("Delete Metadata: {}", metadata);
+    LOGGER.debug("Delete Metadata: {}", metadata);
 
-        String index = ElasticsearchMetadataUtil.getIndex(metadata, config);
-        String type = ElasticsearchMetadataUtil.getType(metadata, config);
-        String id = ElasticsearchMetadataUtil.getId(streamsDatum);
+    String index = ElasticsearchMetadataUtil.getIndex(metadata, config);
+    String type = ElasticsearchMetadataUtil.getType(metadata, config);
+    String id = ElasticsearchMetadataUtil.getId(streamsDatum);
 
-        try {
-            delete(index, type, id);
-        } catch (Throwable e) {
-            LOGGER.warn("Unable to Delete Document from ElasticSearch: {}", e.getMessage());
-        }
+    try {
+      delete(index, type, id);
+    } catch (Throwable ex) {
+      LOGGER.warn("Unable to Delete Document from ElasticSearch: {}", ex.getMessage());
     }
+  }
 
-    public void delete(String index, String type, String id) {
-        DeleteRequest deleteRequest;
+  /**
+   * Prepare and en-queue @see org.elasticsearch.action.delete.DeleteRequest
+   * @param index index
+   * @param type type
+   * @param id id
+   */
+  public void delete(String index, String type, String id) {
+    DeleteRequest deleteRequest;
 
-        Preconditions.checkNotNull(index);
-        Preconditions.checkNotNull(id);
-        Preconditions.checkNotNull(type);
+    Preconditions.checkNotNull(index);
+    Preconditions.checkNotNull(id);
+    Preconditions.checkNotNull(type);
 
-        // They didn't specify an ID, so we will create one for them.
-        deleteRequest = new DeleteRequest()
-                .index(index)
-                .type(type)
-                .id(id);
+    // They didn't specify an ID, so we will create one for them.
+    deleteRequest = new DeleteRequest()
+        .index(index)
+        .type(type)
+        .id(id);
 
-        add(deleteRequest);
+    add(deleteRequest);
 
-    }
-
-    public void add(DeleteRequest request) {
+  }
 
-        Preconditions.checkNotNull(request);
-        Preconditions.checkNotNull(request.index());
+  /**
+   * Enqueue DeleteRequest.
+   * @param request request
+   */
+  public void add(DeleteRequest request) {
 
-        // If our queue is larger than our flush threshold, then we should flush the queue.
-        synchronized (this) {
-            checkIndexImplications(request.index());
+    Preconditions.checkNotNull(request);
+    Preconditions.checkNotNull(request.index());
 
-            bulkRequest.add(request);
+    // If our queue is larger than our flush threshold, then we should flush the queue.
+    synchronized (this) {
+      checkIndexImplications(request.index());
 
-            currentBatchItems.incrementAndGet();
+      bulkRequest.add(request);
 
-            checkForFlush();
-        }
+      currentBatchItems.incrementAndGet();
 
+      checkForFlush();
     }
 
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistReader.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistReader.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistReader.java
index 909f5c4..388497e 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistReader.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistReader.java
@@ -18,14 +18,16 @@
 
 package org.apache.streams.elasticsearch;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Queues;
 import org.apache.streams.core.DatumStatusCounter;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistReader;
 import org.apache.streams.core.StreamsResultSet;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.collect.Queues;
+
 import org.elasticsearch.search.SearchHit;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
@@ -44,187 +46,188 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
+/**
+ * ElasticsearchPersistReader reads documents from elasticsearch.
+ */
 public class ElasticsearchPersistReader implements StreamsPersistReader, Serializable {
 
-    public static final String STREAMS_ID = "ElasticsearchPersistReader";
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistReader.class);
-
-    protected volatile Queue<StreamsDatum> persistQueue;
-
-    private ElasticsearchQuery elasticsearchQuery;
-    private ElasticsearchReaderConfiguration config;
-    private int threadPoolSize = 10;
-    private ExecutorService executor;
-    private ReadWriteLock lock = new ReentrantReadWriteLock();
-    private Future<?> readerTask;
-
-    public ElasticsearchPersistReader() {
+  public static final String STREAMS_ID = "ElasticsearchPersistReader";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistReader.class);
+
+  protected volatile Queue<StreamsDatum> persistQueue;
+
+  private ElasticsearchQuery elasticsearchQuery;
+  private ElasticsearchReaderConfiguration config;
+  private int threadPoolSize = 10;
+  private ExecutorService executor;
+  private ReadWriteLock lock = new ReentrantReadWriteLock();
+  private Future<?> readerTask;
+
+  public ElasticsearchPersistReader() {
+  }
+
+  public ElasticsearchPersistReader(ElasticsearchReaderConfiguration config) {
+    this.config = config;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  //PersistReader methods
+  @Override
+  public void startStream() {
+    LOGGER.debug("startStream");
+    executor = Executors.newSingleThreadExecutor();
+    readerTask = executor.submit(new ElasticsearchPersistReaderTask(this, elasticsearchQuery));
+  }
+
+  @Override
+  public void prepare(Object configuration) {
+    elasticsearchQuery = this.config == null ? new ElasticsearchQuery() : new ElasticsearchQuery(config);
+    elasticsearchQuery.execute(configuration);
+    persistQueue = constructQueue();
+  }
+
+  @Override
+  public StreamsResultSet readAll() {
+    return readCurrent();
+  }
+
+  @Override
+  public StreamsResultSet readCurrent() {
+
+    StreamsResultSet current;
+
+    try {
+      lock.writeLock().lock();
+      current = new StreamsResultSet(persistQueue);
+      current.setCounter(new DatumStatusCounter());
+      persistQueue = constructQueue();
+    } finally {
+      lock.writeLock().unlock();
     }
 
-    public ElasticsearchPersistReader(ElasticsearchReaderConfiguration config) {
-        this.config = config;
+    return current;
+
+  }
+
+  //TODO - This just reads current records and does not adjust any queries
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return readCurrent();
+  }
+
+  //TODO - This just reads current records and does not adjust any queries
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return readCurrent();
+  }
+
+  //If we still have data in the queue, we are still running
+  @Override
+  public boolean isRunning() {
+    return persistQueue.size() > 0 || (!readerTask.isDone() && !readerTask.isCancelled());
+  }
+
+  @Override
+  public void cleanUp() {
+    this.shutdownAndAwaitTermination(executor);
+    LOGGER.info("PersistReader done");
+    if ( elasticsearchQuery != null ) {
+      elasticsearchQuery.cleanUp();
     }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    //PersistReader methods
-    @Override
-    public void startStream() {
-        LOGGER.debug("startStream");
-        executor = Executors.newSingleThreadExecutor();
-        readerTask = executor.submit(new ElasticsearchPersistReaderTask(this, elasticsearchQuery));
-    }
-
-    @Override
-    public void prepare(Object o) {
-        elasticsearchQuery = this.config == null ? new ElasticsearchQuery() : new ElasticsearchQuery(config);
-        elasticsearchQuery.execute(o);
-        persistQueue = constructQueue();
-    }
-
-    @Override
-    public StreamsResultSet readAll() {
-        return readCurrent();
+  }
+
+  //The locking may appear to be counter intuitive but we really don't care if multiple threads offer to the queue
+  //as it is a synchronized queue.  What we do care about is that we don't want to be offering to the current reference
+  //if the queue is being replaced with a new instance
+  protected void write(StreamsDatum entry) {
+    boolean success;
+    do {
+      try {
+        lock.readLock().lock();
+        success = persistQueue.offer(entry);
+        Thread.yield();
+      } finally {
+        lock.readLock().unlock();
+      }
     }
-
-    @Override
-    public StreamsResultSet readCurrent() {
-
-        StreamsResultSet current;
-
-        try {
-            lock.writeLock().lock();
-            current = new StreamsResultSet(persistQueue);
-            current.setCounter(new DatumStatusCounter());
-//            current.getCounter().add(countersCurrent);
-//            countersTotal.add(countersCurrent);
-//            countersCurrent = new DatumStatusCounter();
-            persistQueue = constructQueue();
-        } finally {
-            lock.writeLock().unlock();
+    while (!success);
+  }
+
+  protected void shutdownAndAwaitTermination(ExecutorService pool) {
+    pool.shutdown(); // Disable new tasks from being submitted
+    try {
+      // Wait a while for existing tasks to terminate
+      if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+        pool.shutdownNow(); // Cancel currently executing tasks
+        // Wait a while for tasks to respond to being cancelled
+        if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+          LOGGER.error("Pool did not terminate");
         }
+      }
+    } catch (InterruptedException ie) {
+      // (Re-)Cancel if current thread also interrupted
+      pool.shutdownNow();
+      // Preserve interrupt status
+      Thread.currentThread().interrupt();
+    }
+  }
 
-        return current;
+  private Queue<StreamsDatum> constructQueue() {
+    return Queues.synchronizedQueue(new LinkedBlockingQueue<StreamsDatum>(10000));
+  }
 
-    }
+  public static class ElasticsearchPersistReaderTask implements Runnable {
 
-    //TODO - This just reads current records and does not adjust any queries
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return readCurrent();
-    }
+    private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistReaderTask.class);
 
-    //TODO - This just reads current records and does not adjust any queries
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return readCurrent();
-    }
+    private ElasticsearchPersistReader reader;
+    private ElasticsearchQuery query;
+    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    //If we still have data in the queue, we are still running
-    @Override
-    public boolean isRunning() {
-        return persistQueue.size() > 0 || (!readerTask.isDone() && !readerTask.isCancelled());
+    public ElasticsearchPersistReaderTask(ElasticsearchPersistReader reader, ElasticsearchQuery query) {
+      this.reader = reader;
+      this.query = query;
     }
 
     @Override
-    public void cleanUp() {
-        this.shutdownAndAwaitTermination(executor);
-        LOGGER.info("PersistReader done");
-        if(elasticsearchQuery != null) {
-            elasticsearchQuery.cleanUp();
-        }
-    }
+    public void run() {
 
-    //The locking may appear to be counter intuitive but we really don't care if multiple threads offer to the queue
-    //as it is a synchronized queue.  What we do care about is that we don't want to be offering to the current reference
-    //if the queue is being replaced with a new instance
-    protected void write(StreamsDatum entry) {
-        boolean success;
-        do {
-            try {
-                lock.readLock().lock();
-                success = persistQueue.offer(entry);
-                Thread.yield();
-            }finally {
-                lock.readLock().unlock();
-            }
-        }
-        while (!success);
-    }
-
-    protected void shutdownAndAwaitTermination(ExecutorService pool) {
-        pool.shutdown(); // Disable new tasks from being submitted
+      StreamsDatum item;
+      while (query.hasNext()) {
+        SearchHit hit = query.next();
+        ObjectNode jsonObject = null;
         try {
-            // Wait a while for existing tasks to terminate
-            if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
-                pool.shutdownNow(); // Cancel currently executing tasks
-                // Wait a while for tasks to respond to being cancelled
-                if (!pool.awaitTermination(10, TimeUnit.SECONDS))
-                    LOGGER.error("Pool did not terminate");
-            }
-        } catch (InterruptedException ie) {
-            // (Re-)Cancel if current thread also interrupted
-            pool.shutdownNow();
-            // Preserve interrupt status
-            Thread.currentThread().interrupt();
-        }
-    }
-
-    private Queue<StreamsDatum> constructQueue() {
-        return Queues.synchronizedQueue(new LinkedBlockingQueue<StreamsDatum>(10000));
-    }
-
-    public static class ElasticsearchPersistReaderTask implements Runnable {
-
-        private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistReaderTask.class);
-
-        private ElasticsearchPersistReader reader;
-        private ElasticsearchQuery query;
-        private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-        public ElasticsearchPersistReaderTask(ElasticsearchPersistReader reader, ElasticsearchQuery query) {
-            this.reader = reader;
-            this.query = query;
+          jsonObject = mapper.readValue(hit.getSourceAsString(), ObjectNode.class);
+          item = new StreamsDatum(jsonObject, hit.getId());
+          item.getMetadata().put("id", hit.getId());
+          item.getMetadata().put("index", hit.getIndex());
+          item.getMetadata().put("type", hit.getType());
+          if ( hit.fields().containsKey("_timestamp")) {
+            DateTime timestamp = new DateTime(((Long) hit.field("_timestamp").getValue()).longValue());
+            item.setTimestamp(timestamp);
+          }
+          if ( hit.fields().containsKey("_parent")) {
+            item.getMetadata().put("parent", hit.fields().get("_parent").value());
+          }
+          reader.write(item);
+        } catch (IOException ex) {
+          LOGGER.warn("Unable to process json source: ", hit.getSourceAsString());
         }
 
-        @Override
-        public void run() {
-
-            StreamsDatum item;
-            while (query.hasNext()) {
-                SearchHit hit = query.next();
-                ObjectNode jsonObject = null;
-                try {
-                    jsonObject = mapper.readValue(hit.getSourceAsString(), ObjectNode.class);
-                    item = new StreamsDatum(jsonObject, hit.getId());
-                    item.getMetadata().put("id", hit.getId());
-                    item.getMetadata().put("index", hit.getIndex());
-                    item.getMetadata().put("type", hit.getType());
-                    if( hit.fields().containsKey("_timestamp")) {
-                        DateTime timestamp = new DateTime(((Long) hit.field("_timestamp").getValue()).longValue());
-                        item.setTimestamp(timestamp);
-                    }
-                    if( hit.fields().containsKey("_parent")) {
-                        item.getMetadata().put("parent", hit.fields().get("_parent").value());
-                    }
-                    reader.write(item);
-                } catch (IOException e) {
-                    LOGGER.warn("Unable to process json source: ", hit.getSourceAsString());
-                }
-
-            }
-            try {
-                Thread.sleep(new Random().nextInt(100));
-            } catch (InterruptedException e) {
-                LOGGER.warn("Thread interrupted", e);
-            }
+      }
+      try {
+        Thread.sleep(new Random().nextInt(100));
+      } catch (InterruptedException ex) {
+        LOGGER.warn("Thread interrupted", ex);
+      }
 
-        }
     }
+  }
 }
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistUpdater.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistUpdater.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistUpdater.java
index f712248..f4da436 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistUpdater.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistUpdater.java
@@ -18,111 +18,131 @@
 
 package org.apache.streams.elasticsearch;
 
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistWriter;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+
 import org.elasticsearch.action.update.UpdateRequest;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Map;
 
+/**
+ * ElasticsearchPersistUpdater updates documents to elasticsearch.
+ */
 public class ElasticsearchPersistUpdater extends ElasticsearchPersistWriter implements StreamsPersistWriter {
 
-    public static final String STREAMS_ID = ElasticsearchPersistUpdater.class.getCanonicalName();
+  public static final String STREAMS_ID = ElasticsearchPersistUpdater.class.getCanonicalName();
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistUpdater.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistUpdater.class);
 
-    public ElasticsearchPersistUpdater() {
-        super();
-    }
+  public ElasticsearchPersistUpdater() {
+    super();
+  }
 
-    public ElasticsearchPersistUpdater(ElasticsearchWriterConfiguration config) {
-        super(config);
-    }
+  public ElasticsearchPersistUpdater(ElasticsearchWriterConfiguration config) {
+    super(config);
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public void write(StreamsDatum streamsDatum) {
+  @Override
+  public void write(StreamsDatum streamsDatum) {
 
-        if(streamsDatum == null || streamsDatum.getDocument() == null)
-            return;
+    if (streamsDatum == null || streamsDatum.getDocument() == null) {
+      return;
+    }
 
-        LOGGER.debug("Update Document: {}", streamsDatum.getDocument());
+    LOGGER.debug("Update Document: {}", streamsDatum.getDocument());
 
-        Map<String, Object> metadata = streamsDatum.getMetadata();
+    Map<String, Object> metadata = streamsDatum.getMetadata();
 
-        LOGGER.debug("Update Metadata: {}", metadata);
+    LOGGER.debug("Update Metadata: {}", metadata);
 
-        String index = ElasticsearchMetadataUtil.getIndex(metadata, config);
-        String type = ElasticsearchMetadataUtil.getType(metadata, config);
-        String id = ElasticsearchMetadataUtil.getId(streamsDatum);
-        String parent = ElasticsearchMetadataUtil.getParent(streamsDatum);
-        String routing = ElasticsearchMetadataUtil.getRouting(streamsDatum);
+    String index = ElasticsearchMetadataUtil.getIndex(metadata, config);
+    String type = ElasticsearchMetadataUtil.getType(metadata, config);
+    String id = ElasticsearchMetadataUtil.getId(streamsDatum);
+    String parent = ElasticsearchMetadataUtil.getParent(streamsDatum);
+    String routing = ElasticsearchMetadataUtil.getRouting(streamsDatum);
 
-        try {
+    try {
 
-            String docAsJson = docAsJson(streamsDatum.getDocument());
+      String docAsJson = docAsJson(streamsDatum.getDocument());
 
-            LOGGER.debug("Attempt Update: ({},{},{},{},{}) {}", index, type, id, parent, routing, docAsJson);
+      LOGGER.debug("Attempt Update: ({},{},{},{},{}) {}", index, type, id, parent, routing, docAsJson);
 
-            update(index, type, id, parent, routing, docAsJson);
+      update(index, type, id, parent, routing, docAsJson);
 
-        } catch (Throwable e) {
-            LOGGER.warn("Unable to Update Document in ElasticSearch: {}", e.getMessage());
-        }
+    } catch (Throwable ex) {
+      LOGGER.warn("Unable to Update Document in ElasticSearch: {}", ex.getMessage());
+    }
+  }
+
+  /**
+   * Prepare and en-queue.
+   * @see org.elasticsearch.action.update.UpdateRequest
+   * @param indexName indexName
+   * @param type type
+   * @param id id
+   * @param parent parent
+   * @param routing routing
+   * @param json json
+   */
+  public void update(String indexName, String type, String id, String parent, String routing, String json) {
+    UpdateRequest updateRequest;
+
+    Preconditions.checkNotNull(id);
+    Preconditions.checkNotNull(json);
+
+    // Build an update request targeting the required document id (checked non-null above).
+    updateRequest = new UpdateRequest()
+        .index(indexName)
+        .type(type)
+        .id(id)
+        .doc(json);
+
+    if (!Strings.isNullOrEmpty(parent)) {
+      updateRequest = updateRequest.parent(parent);
     }
 
-    public void update(String indexName, String type, String id, String parent, String routing, String json) {
-        UpdateRequest updateRequest;
-
-        Preconditions.checkNotNull(id);
-        Preconditions.checkNotNull(json);
-
-        // They didn't specify an ID, so we will create one for them.
-        updateRequest = new UpdateRequest()
-                .index(indexName)
-                .type(type)
-                .id(id)
-                .doc(json);
-
-        if(!Strings.isNullOrEmpty(parent)) {
-            updateRequest = updateRequest.parent(parent);
-        }
-
-        if(!Strings.isNullOrEmpty(routing)) {
-            updateRequest = updateRequest.routing(routing);
-        }
+    if (!Strings.isNullOrEmpty(routing)) {
+      updateRequest = updateRequest.routing(routing);
+    }
 
-        // add fields
-        //updateRequest.docAsUpsert(true);
+    // add fields
+    //updateRequest.docAsUpsert(true);
 
-        add(updateRequest);
+    add(updateRequest);
 
-    }
+  }
 
-    public void add(UpdateRequest request) {
+  /**
+   * Enqueue UpdateRequest.
+   * @param request request
+   */
+  public void add(UpdateRequest request) {
 
-        Preconditions.checkNotNull(request);
-        Preconditions.checkNotNull(request.index());
+    Preconditions.checkNotNull(request);
+    Preconditions.checkNotNull(request.index());
 
-        // If our queue is larger than our flush threshold, then we should flush the queue.
-        synchronized (this) {
-            checkIndexImplications(request.index());
+    // If our queue is larger than our flush threshold, then we should flush the queue.
+    synchronized (this) {
+      checkIndexImplications(request.index());
 
-            bulkRequest.add(request);
+      bulkRequest.add(request);
 
-            currentBatchBytes.addAndGet(request.doc().source().length());
-            currentBatchItems.incrementAndGet();
-
-            checkForFlush();
-        }
+      currentBatchBytes.addAndGet(request.doc().source().length());
+      currentBatchItems.incrementAndGet();
 
+      checkForFlush();
     }
 
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistWriter.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistWriter.java
index 8f9c7d7..07ab734 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistWriter.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchPersistWriter.java
@@ -19,14 +19,16 @@
 
 package org.apache.streams.elasticsearch;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Preconditions;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistWriter;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Preconditions;
+
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
@@ -55,495 +57,581 @@ import java.util.TimerTask;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
+/**
+ * ElasticsearchPersistWriter writes documents to elasticsearch.
+ */
 public class ElasticsearchPersistWriter implements StreamsPersistWriter, Serializable {
 
-    public static final String STREAMS_ID = ElasticsearchPersistWriter.class.getCanonicalName();
+  public static final String STREAMS_ID = ElasticsearchPersistWriter.class.getCanonicalName();
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistWriter.class);
-    private static final NumberFormat MEGABYTE_FORMAT = new DecimalFormat("#.##");
-    private static final NumberFormat NUMBER_FORMAT = new DecimalFormat("###,###,###,###");
-    private static final Long DEFAULT_BULK_FLUSH_THRESHOLD = 5L * 1024L * 1024L;
-    private static final int DEFAULT_BATCH_SIZE = 100;
-    //ES defaults its bulk index queue to 50 items.  We want to be under this on our backoff so set this to 1/2 ES default
-    //at a batch size as configured here.
-    private static final long WAITING_DOCS_LIMIT = DEFAULT_BATCH_SIZE * 25;
-    //A document should have to wait no more than 10s to get flushed
-    private static final long DEFAULT_MAX_WAIT = 10000;
+  private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistWriter.class);
+  private static final NumberFormat MEGABYTE_FORMAT = new DecimalFormat("#.##");
+  private static final NumberFormat NUMBER_FORMAT = new DecimalFormat("###,###,###,###");
+  private static final Long DEFAULT_BULK_FLUSH_THRESHOLD = 5L * 1024L * 1024L;
+  private static final int DEFAULT_BATCH_SIZE = 100;
+  //ES defaults its bulk index queue to 50 items.  We want to be under this on our backoff so set this to 1/2 ES default
+  //at a batch size as configured here.
+  private static final long WAITING_DOCS_LIMIT = DEFAULT_BATCH_SIZE * 25;
+  //A document should have to wait no more than 10s to get flushed
+  private static final long DEFAULT_MAX_WAIT = 10000;
 
-    protected static final ObjectMapper OBJECT_MAPPER = StreamsJacksonMapper.getInstance();
+  protected static final ObjectMapper OBJECT_MAPPER = StreamsJacksonMapper.getInstance();
 
-    protected final List<String> affectedIndexes = new ArrayList<>();
+  protected final List<String> affectedIndexes = new ArrayList<>();
 
-    protected final ElasticsearchClientManager manager;
-    protected final ElasticsearchWriterConfiguration config;
+  protected final ElasticsearchClientManager manager;
+  protected final ElasticsearchWriterConfiguration config;
 
-    protected BulkRequestBuilder bulkRequest;
+  protected BulkRequestBuilder bulkRequest;
 
-    private boolean veryLargeBulk = false;  // by default this setting is set to false
-    private long flushThresholdsRecords = DEFAULT_BATCH_SIZE;
-    private long flushThresholdBytes = DEFAULT_BULK_FLUSH_THRESHOLD;
+  private boolean veryLargeBulk = false;  // by default this setting is set to false
+  private long flushThresholdsRecords = DEFAULT_BATCH_SIZE;
+  private long flushThresholdBytes = DEFAULT_BULK_FLUSH_THRESHOLD;
 
-    private long flushThresholdTime = DEFAULT_MAX_WAIT;
-    private long lastFlush = new Date().getTime();
-    private Timer timer = new Timer();
+  private long flushThresholdTime = DEFAULT_MAX_WAIT;
+  private long lastFlush = new Date().getTime();
+  private Timer timer = new Timer();
 
 
-    private final AtomicInteger batchesSent = new AtomicInteger(0);
-    private final AtomicInteger batchesResponded = new AtomicInteger(0);
+  private final AtomicInteger batchesSent = new AtomicInteger(0);
+  private final AtomicInteger batchesResponded = new AtomicInteger(0);
 
-    protected final AtomicLong currentBatchItems = new AtomicLong(0);
-    protected final AtomicLong currentBatchBytes = new AtomicLong(0);
+  protected final AtomicLong currentBatchItems = new AtomicLong(0);
+  protected final AtomicLong currentBatchBytes = new AtomicLong(0);
 
-    private final AtomicLong totalSent = new AtomicLong(0);
-    private final AtomicLong totalSeconds = new AtomicLong(0);
-    private final AtomicLong totalOk = new AtomicLong(0);
-    private final AtomicLong totalFailed = new AtomicLong(0);
-    private final AtomicLong totalSizeInBytes = new AtomicLong(0);
+  private final AtomicLong totalSent = new AtomicLong(0);
+  private final AtomicLong totalSeconds = new AtomicLong(0);
+  private final AtomicLong totalOk = new AtomicLong(0);
+  private final AtomicLong totalFailed = new AtomicLong(0);
+  private final AtomicLong totalSizeInBytes = new AtomicLong(0);
 
-    public ElasticsearchPersistWriter() {
-        this(new ComponentConfigurator<>(ElasticsearchWriterConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("elasticsearch")));
-    }
-
-    public ElasticsearchPersistWriter(ElasticsearchWriterConfiguration config) {
-        this(config, new ElasticsearchClientManager(config));
-    }
+  public ElasticsearchPersistWriter() {
+    this(new ComponentConfigurator<>(ElasticsearchWriterConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("elasticsearch")));
+  }
 
-    public ElasticsearchPersistWriter(ElasticsearchWriterConfiguration config, ElasticsearchClientManager manager) {
-        this.config = config;
-        this.manager = manager;
-        this.bulkRequest = this.manager.getClient().prepareBulk();
-    }
+  public ElasticsearchPersistWriter(ElasticsearchWriterConfiguration config) {
+    this(config, new ElasticsearchClientManager(config));
+  }
 
-    public long getBatchesSent()                            { return this.batchesSent.get(); }
-    public long getBatchesResponded()                       { return batchesResponded.get(); }
+  /**
+   * ElasticsearchPersistWriter constructor.
+   * @param config config
+   * @param manager manager
+   */
+  public ElasticsearchPersistWriter(ElasticsearchWriterConfiguration config, ElasticsearchClientManager manager) {
+    this.config = config;
+    this.manager = manager;
+    this.bulkRequest = this.manager.getClient().prepareBulk();
+  }
 
+  public long getBatchesSent() {
+    return this.batchesSent.get();
+  }
 
-    public long getFlushThresholdsRecords()                 { return this.flushThresholdsRecords; }
-    public long getFlushThresholdBytes()                    { return this.flushThresholdBytes; }
-    public long getFlushThreasholdMaxTime()                 { return this.flushThresholdTime; }
+  public long getBatchesResponded() {
+    return batchesResponded.get();
+  }
 
-    public void setFlushThresholdRecords(long val)          { this.flushThresholdsRecords = val; }
-    public void setFlushThresholdBytes(long val)            { this.flushThresholdBytes = val; }
-    public void setFlushThreasholdMaxTime(long val)         { this.flushThresholdTime = val; }
-    public void setVeryLargeBulk(boolean veryLargeBulk)     { this.veryLargeBulk = veryLargeBulk; }
+  public long getFlushThresholdsRecords() {
+    return this.flushThresholdsRecords;
+  }
 
-    private long getLastFlush()                             { return this.lastFlush; }
+  public long getFlushThresholdBytes() {
+    return this.flushThresholdBytes;
+  }
 
-    public long getTotalOutstanding()                       { return this.totalSent.get() - (this.totalFailed.get() + this.totalOk.get()); }
-    public long getTotalSent()                              { return this.totalSent.get(); }
-    public long getTotalOk()                                { return this.totalOk.get(); }
-    public long getTotalFailed()                            { return this.totalFailed.get(); }
-    public long getTotalSizeInBytes()                       { return this.totalSizeInBytes.get(); }
-    public long getTotalSeconds()                           { return this.totalSeconds.get(); }
-    public List<String> getAffectedIndexes()                { return this.affectedIndexes; }
+  public long getFlushThreasholdMaxTime() {
+    return this.flushThresholdTime;
+  }
 
-    public boolean isConnected()                            { return (this.manager.getClient() != null); }
+  public void setFlushThresholdRecords(long val) {
+    this.flushThresholdsRecords = val;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public void setFlushThresholdBytes(long val) {
+    this.flushThresholdBytes = val;
+  }
 
-    @Override
-    public void write(StreamsDatum streamsDatum) {
-        if(streamsDatum == null || streamsDatum.getDocument() == null)
-            return;
+  public void setFlushThreasholdMaxTime(long val) {
+    this.flushThresholdTime = val;
+  }
 
-        checkForBackOff();
+  public void setVeryLargeBulk(boolean veryLargeBulk) {
+    this.veryLargeBulk = veryLargeBulk;
+  }
 
-        LOGGER.debug("Write Document: {}", streamsDatum.getDocument());
+  private long getLastFlush() {
+    return this.lastFlush;
+  }
+
+  public long getTotalOutstanding() {
+    return this.totalSent.get() - (this.totalFailed.get() + this.totalOk.get());
+  }
 
-        Map<String, Object> metadata = streamsDatum.getMetadata();
+  public long getTotalSent() {
+    return this.totalSent.get();
+  }
 
-        LOGGER.debug("Write Metadata: {}", metadata);
+  public long getTotalOk() {
+    return this.totalOk.get();
+  }
 
-        String index = ElasticsearchMetadataUtil.getIndex(metadata, config);
-        String type = ElasticsearchMetadataUtil.getType(metadata, config);
-        String id = ElasticsearchMetadataUtil.getId(streamsDatum);
-        String parent = ElasticsearchMetadataUtil.getParent(streamsDatum);
-        String routing = ElasticsearchMetadataUtil.getRouting(streamsDatum);
+  public long getTotalFailed() {
+    return this.totalFailed.get();
+  }
 
-        try {
-            streamsDatum = appendMetadata(streamsDatum);
-            String docAsJson = docAsJson(streamsDatum.getDocument());
-            add(index, type, id, parent, routing,
-                    streamsDatum.getTimestamp() == null ? Long.toString(DateTime.now().getMillis()) : Long.toString(streamsDatum.getTimestamp().getMillis()),
-                    docAsJson);
-        } catch (Throwable e) {
-            LOGGER.warn("Unable to Write Datum to ElasticSearch: {}", e.getMessage());
-        }
+  public long getTotalSizeInBytes() {
+    return this.totalSizeInBytes.get();
+  }
+
+  public long getTotalSeconds() {
+    return this.totalSeconds.get();
+  }
+
+  public List<String> getAffectedIndexes() {
+    return this.affectedIndexes;
+  }
+
+  public boolean isConnected() {
+    return (this.manager.getClient() != null);
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void write(StreamsDatum streamsDatum) {
+
+    if (streamsDatum == null || streamsDatum.getDocument() == null) {
+      return;
     }
 
-    protected String docAsJson(Object streamsDocument) throws IOException {
-        return (streamsDocument instanceof String) ? streamsDocument.toString() : OBJECT_MAPPER.writeValueAsString(streamsDocument);
-    }
+    checkForBackOff();
 
-    protected StreamsDatum appendMetadata(StreamsDatum streamsDatum) throws IOException {
+    LOGGER.debug("Write Document: {}", streamsDatum.getDocument());
 
-        String docAsJson = (streamsDatum.getDocument() instanceof String) ? streamsDatum.getDocument().toString() : OBJECT_MAPPER.writeValueAsString(streamsDatum.getDocument());
+    Map<String, Object> metadata = streamsDatum.getMetadata();
 
-        if(streamsDatum.getMetadata() == null || streamsDatum.getMetadata().size() == 0)
-            return streamsDatum;
-        else {
-            ObjectNode node = (ObjectNode)OBJECT_MAPPER.readTree(docAsJson);
-            node.put("_metadata", OBJECT_MAPPER.readTree(OBJECT_MAPPER.writeValueAsBytes(streamsDatum.getMetadata())));
-            streamsDatum.setDocument(OBJECT_MAPPER.writeValueAsString(node));
-            return streamsDatum;
-        }
+    LOGGER.debug("Write Metadata: {}", metadata);
+
+    String index = ElasticsearchMetadataUtil.getIndex(metadata, config);
+    String type = ElasticsearchMetadataUtil.getType(metadata, config);
+    String id = ElasticsearchMetadataUtil.getId(streamsDatum);
+    String parent = ElasticsearchMetadataUtil.getParent(streamsDatum);
+    String routing = ElasticsearchMetadataUtil.getRouting(streamsDatum);
+
+    try {
+      streamsDatum = appendMetadata(streamsDatum);
+      String docAsJson = docAsJson(streamsDatum.getDocument());
+      add(index, type, id, parent, routing,
+          streamsDatum.getTimestamp() == null ? Long.toString(DateTime.now().getMillis()) : Long.toString(streamsDatum.getTimestamp().getMillis()),
+          docAsJson);
+    } catch (Throwable ex) {
+      LOGGER.warn("Unable to Write Datum to ElasticSearch: {}", ex.getMessage());
     }
+  }
 
-    public void cleanUp() {
+  protected String docAsJson(Object streamsDocument) throws IOException {
+    return (streamsDocument instanceof String) ? streamsDocument.toString() : OBJECT_MAPPER.writeValueAsString(streamsDocument);
+  }
 
-        try {
+  protected StreamsDatum appendMetadata(StreamsDatum streamsDatum) throws IOException {
 
-            LOGGER.debug("cleanUp started");
+    String docAsJson = (streamsDatum.getDocument() instanceof String) ? streamsDatum.getDocument().toString() : OBJECT_MAPPER.writeValueAsString(streamsDatum.getDocument());
 
-            // before they close, check to ensure that
-            flushInternal();
+    if (streamsDatum.getMetadata() == null || streamsDatum.getMetadata().size() == 0) {
+      return streamsDatum;
+    } else {
+      ObjectNode node = (ObjectNode)OBJECT_MAPPER.readTree(docAsJson);
+      node.put("_metadata", OBJECT_MAPPER.readTree(OBJECT_MAPPER.writeValueAsBytes(streamsDatum.getMetadata())));
+      streamsDatum.setDocument(OBJECT_MAPPER.writeValueAsString(node));
+      return streamsDatum;
+    }
+  }
 
-            LOGGER.debug("flushInternal completed");
+  @Override
+  public void cleanUp() {
 
-            waitToCatchUp(0, 5 * 60 * 1000);
+    try {
 
-            LOGGER.debug("waitToCatchUp completed");
+      LOGGER.debug("cleanUp started");
 
-        } catch (Throwable e) {
-            // this line of code should be logically unreachable.
-            LOGGER.warn("This is unexpected: {}", e);
-        } finally {
+      // before they close, check to ensure that
+      flushInternal();
 
-            if(veryLargeBulk) {
-                resetRefreshInterval();
-            }
+      LOGGER.debug("flushInternal completed");
 
-            if( config.getRefresh() ) {
-                refreshIndexes();
-                LOGGER.debug("refreshIndexes completed");
-            }
+      waitToCatchUp(0, 5 * 60 * 1000);
 
-            LOGGER.debug("Closed ElasticSearch Writer: Ok[{}] Failed[{}] Orphaned[{}]",
-              this.totalOk.get(), this.totalFailed.get(), this.getTotalOutstanding());
-            timer.cancel();
+      LOGGER.debug("waitToCatchUp completed");
 
-            LOGGER.debug("cleanUp completed");
-        }
-    }
+    } catch (Throwable ex) {
+      // this line of code should be logically unreachable.
+      LOGGER.warn("This is unexpected: {}", ex);
+    } finally {
 
-    private void resetRefreshInterval() {
-        for (String indexName : this.affectedIndexes) {
-
-            if (this.veryLargeBulk) {
-                LOGGER.debug("Resetting our Refresh Interval: {}", indexName);
-                // They are in 'very large bulk' mode and the process is finished. We now want to turn the
-                // refreshing back on.
-                UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indexName);
-                updateSettingsRequest.settings(Settings.settingsBuilder().put("refresh_interval", "5s"));
-
-                // submit to ElasticSearch
-                this.manager.getClient()
-                        .admin()
-                        .indices()
-                        .updateSettings(updateSettingsRequest)
-                        .actionGet();
-            }
-        }
-    }
+      if (veryLargeBulk) {
+        resetRefreshInterval();
+      }
 
-    private void refreshIndexes() {
-        for (String indexName : this.affectedIndexes) {
-
-            if (config.getRefresh()) {
-                LOGGER.debug("Refreshing ElasticSearch index: {}", indexName);
-                this.manager.getClient()
-                        .admin()
-                        .indices()
-                        .prepareRefresh(indexName)
-                        .execute()
-                        .actionGet();
-            }
-        }
-    }
+      if ( config.getRefresh() ) {
+        refreshIndexes();
+        LOGGER.debug("refreshIndexes completed");
+      }
 
-    private synchronized void flushInternal() {
-        // we do not have a working bulk request, we can just exit here.
-        if (this.bulkRequest == null || this.currentBatchItems.get() == 0)
-            return;
+      LOGGER.debug("Closed ElasticSearch Writer: Ok[{}] Failed[{}] Orphaned[{}]",
+          this.totalOk.get(), this.totalFailed.get(), this.getTotalOutstanding());
+      timer.cancel();
 
-        // wait for one minute to catch up if it needs to
-        waitToCatchUp(5, 60 * 1000);
+      LOGGER.debug("cleanUp completed");
+    }
+  }
+
+  private void resetRefreshInterval() {
+    for (String indexName : this.affectedIndexes) {
+
+      if (this.veryLargeBulk) {
+        LOGGER.debug("Resetting our Refresh Interval: {}", indexName);
+        // They are in 'very large bulk' mode and the process is finished. We now want to turn the
+        // refreshing back on.
+        UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indexName);
+        updateSettingsRequest.settings(Settings.settingsBuilder().put("refresh_interval", "5s"));
+
+        // submit to ElasticSearch
+        this.manager.getClient()
+            .admin()
+            .indices()
+            .updateSettings(updateSettingsRequest)
+            .actionGet();
+      }
+    }
+  }
 
-        // call the flush command.
-        flush(this.bulkRequest, this.currentBatchItems.get(), this.currentBatchBytes.get());
+  private void refreshIndexes() {
 
-        // reset the current batch statistics
-        this.currentBatchItems.set(0);
-        this.currentBatchBytes.set(0);
+    for (String indexName : this.affectedIndexes) {
 
-        // reset our bulk request builder
-        this.bulkRequest = this.manager.getClient().prepareBulk();
+      if (config.getRefresh()) {
+        LOGGER.debug("Refreshing ElasticSearch index: {}", indexName);
+        this.manager.getClient()
+            .admin()
+            .indices()
+            .prepareRefresh(indexName)
+            .execute()
+            .actionGet();
+      }
     }
+  }
 
-    private synchronized void waitToCatchUp(int batchThreshold, int timeOutThresholdInMS) {
-        int counter = 0;
-        // If we still have 5 batches outstanding, we need to give it a minute to catch up
-        while(this.getBatchesSent() - this.getBatchesResponded() > batchThreshold && counter < timeOutThresholdInMS) {
-            try {
-                Thread.yield();
-                Thread.sleep(1);
-                counter++;
-            } catch(InterruptedException ie) {
-                LOGGER.warn("Catchup was interrupted.  Data may be lost");
-                return;
-            }
-        }
+  private synchronized void flushInternal() {
+    // we do not have a working bulk request, we can just exit here.
+    if (this.bulkRequest == null || this.currentBatchItems.get() == 0) {
+      return;
     }
 
-    private void checkForBackOff() {
-        try {
-            if (this.getTotalOutstanding() > WAITING_DOCS_LIMIT) {
-                /*
-                 * Author:
-                 * Smashew
-                 *
-                 * Date:
-                 * 2013-10-20
-                 *
-                 * Note:
-                 * With the information that we have on hand. We need to develop a heuristic
-                 * that will determine when the cluster is having a problem indexing records
-                 * by telling it to pause and wait for it to catch back up. A
-                 *
-                 * There is an impact to us, the caller, whenever this happens as well. Items
-                 * that are not yet fully indexed by the server sit in a queue, on the client
-                 * that can cause the heap to overflow. This has been seen when re-indexing
-                 * large amounts of data to a small cluster. The "deletes" + "indexes" can
-                 * cause the server to have many 'outstandingItems" in queue. Running this
-                 * software with large amounts of data, on a small cluster, can re-create
-                 * this problem.
-                 *
-                 * DO NOT DELETE THESE LINES
-                 ****************************************************************************/
-
-                // wait for the flush to catch up. We are going to cap this at
-                int count = 0;
-                while (this.getTotalOutstanding() > WAITING_DOCS_LIMIT && count++ < 500)
-                    Thread.sleep(10);
-
-                if (this.getTotalOutstanding() > WAITING_DOCS_LIMIT)
-                    LOGGER.warn("Even after back-off there are {} items still in queue.", this.getTotalOutstanding());
-            }
-        } catch (Exception e) {
-            LOGGER.warn("We were broken from our loop: {}", e.getMessage());
+    // wait for one minute to catch up if it needs to
+    waitToCatchUp(5, 60 * 1000);
+
+    // call the flush command.
+    flush(this.bulkRequest, this.currentBatchItems.get(), this.currentBatchBytes.get());
+
+    // reset the current batch statistics
+    this.currentBatchItems.set(0);
+    this.currentBatchBytes.set(0);
+
+    // reset our bulk request builder
+    this.bulkRequest = this.manager.getClient().prepareBulk();
+  }
+
+  private synchronized void waitToCatchUp(int batchThreshold, int timeOutThresholdInMS) {
+    int counter = 0;
+    // If we still have 5 batches outstanding, we need to give it a minute to catch up
+    while (this.getBatchesSent() - this.getBatchesResponded() > batchThreshold && counter < timeOutThresholdInMS) {
+      try {
+        Thread.yield();
+        Thread.sleep(1);
+        counter++;
+      } catch (InterruptedException ie) {
+        LOGGER.warn("Catchup was interrupted.  Data may be lost");
+        return;
+      }
+    }
+  }
+
+  private void checkForBackOff() {
+    try {
+      if (this.getTotalOutstanding() > WAITING_DOCS_LIMIT) {
+        /*
+         * Author:
+         * Smashew
+         *
+         * Date:
+         * 2013-10-20
+         *
+         * Note:
+         * With the information that we have on hand. We need to develop a heuristic
+         * that will determine when the cluster is having a problem indexing records
+         * by telling it to pause and wait for it to catch back up. A
+         *
+         * There is an impact to us, the caller, whenever this happens as well. Items
+         * that are not yet fully indexed by the server sit in a queue, on the client
+         * that can cause the heap to overflow. This has been seen when re-indexing
+         * large amounts of data to a small cluster. The "deletes" + "indexes" can
+         * cause the server to have many 'outstandingItems' in queue. Running this
+         * software with large amounts of data, on a small cluster, can re-create
+         * this problem.
+         *
+         * DO NOT DELETE THESE LINES
+         ****************************************************************************/
+
+        // wait for the flush to catch up. We are going to cap this at ~5 seconds (500 iterations of 10 ms).
+        int count = 0;
+        while (this.getTotalOutstanding() > WAITING_DOCS_LIMIT && count++ < 500) {
+          Thread.sleep(10);
         }
+        if (this.getTotalOutstanding() > WAITING_DOCS_LIMIT) {
+          LOGGER.warn("Even after back-off there are {} items still in queue.", this.getTotalOutstanding());
+        }
+      }
+    } catch (Exception ex) {
+      LOGGER.warn("We were broken from our loop: {}", ex.getMessage());
     }
-
-    public void add(String indexName, String type, String id, String ts, String json) {
-        add(indexName, type, id, null, null, ts, json);
+  }
+
+  /**
+   * add based on supplied parameters.
+   * @param indexName indexName
+   * @param type type
+   * @param id id
+   * @param ts ts
+   * @param json json
+   */
+  public void add(String indexName, String type, String id, String ts, String json) {
+    add(indexName, type, id, null, null, ts, json);
+  }
+
+  /**
+   * add based on supplied parameters.
+   * @param indexName indexName
+   * @param type type
+   * @param id id
+   * @param parent parent
+   * @param routing routing
+   * @param ts ts
+   * @param json json
+   */
+  public void add(String indexName, String type, String id, String parent, String routing, String ts, String json) {
+
+    // make sure that these are not null
+    Preconditions.checkNotNull(indexName);
+    Preconditions.checkNotNull(type);
+    Preconditions.checkNotNull(json);
+
+    IndexRequestBuilder indexRequestBuilder = manager.getClient()
+        .prepareIndex(indexName, type)
+        .setSource(json);
+
+    // If the caller specified an ID, use it; otherwise Elasticsearch will generate one.
+    if (id != null) {
+      indexRequestBuilder.setId(id);
     }
-
-    public void add(String indexName, String type, String id, String parent, String routing, String ts, String json) {
-
-        // make sure that these are not null
-        Preconditions.checkNotNull(indexName);
-        Preconditions.checkNotNull(type);
-        Preconditions.checkNotNull(json);
-
-        IndexRequestBuilder indexRequestBuilder = manager.getClient()
-                .prepareIndex(indexName, type)
-                .setSource(json);
-
-        // / They didn't specify an ID, so we will create one for them.
-        if(id != null)
-            indexRequestBuilder.setId(id);
-
-        if(ts != null)
-            indexRequestBuilder.setTimestamp(ts);
-
-        if(parent != null)
-            indexRequestBuilder.setParent(parent);
-
-        if(routing != null)
-            indexRequestBuilder.setRouting(routing);
-
-        add(indexRequestBuilder.request());
+    if (ts != null) {
+      indexRequestBuilder.setTimestamp(ts);
+    }
+    if (parent != null) {
+      indexRequestBuilder.setParent(parent);
     }
+    if (routing != null) {
+      indexRequestBuilder.setRouting(routing);
+    }
+    add(indexRequestBuilder.request());
+  }
 
-    protected void add(IndexRequest request) {
+  protected void add(IndexRequest request) {
 
-        Preconditions.checkNotNull(request);
-        Preconditions.checkNotNull(request.index());
+    Preconditions.checkNotNull(request);
+    Preconditions.checkNotNull(request.index());
 
-        // If our queue is larger than our flush threshold, then we should flush the queue.
-        synchronized (this) {
-            checkIndexImplications(request.index());
+    // If our queue is larger than our flush threshold, then we should flush the queue.
+    synchronized (this) {
+      checkIndexImplications(request.index());
 
-            bulkRequest.add(request);
+      bulkRequest.add(request);
 
-            this.currentBatchBytes.addAndGet(request.source().length());
-            this.currentBatchItems.incrementAndGet();
+      this.currentBatchBytes.addAndGet(request.source().length());
+      this.currentBatchItems.incrementAndGet();
 
-            checkForFlush();
-        }
+      checkForFlush();
     }
-
-    protected void checkForFlush() {
-        synchronized (this) {
-            if (this.currentBatchBytes.get() >= this.flushThresholdBytes ||
-                    this.currentBatchItems.get() >= this.flushThresholdsRecords ||
-                    new Date().getTime() - this.lastFlush >= this.flushThresholdTime) {
-                // We should flush
-                flushInternal();
-            }
-        }
+  }
+
+  protected void checkForFlush() {
+    synchronized (this) {
+      if (this.currentBatchBytes.get() >= this.flushThresholdBytes
+          ||
+          this.currentBatchItems.get() >= this.flushThresholdsRecords
+          ||
+          new Date().getTime() - this.lastFlush >= this.flushThresholdTime) {
+        // We should flush
+        flushInternal();
+      }
     }
+  }
 
-    protected void checkIndexImplications(String indexName) {
-        // We need this to be safe across all writers that are currently being executed
-        synchronized (ElasticsearchPersistWriter.class) {
+  protected void checkIndexImplications(String indexName) {
+    // We need this to be safe across all writers that are currently being executed
+    synchronized (ElasticsearchPersistWriter.class) {
 
-            // this will be common if we have already verified the index.
-            if (this.affectedIndexes.contains(indexName))
-                return;
+      // this will be common if we have already verified the index.
+      if (this.affectedIndexes.contains(indexName)) {
+        return;
+      }
 
+      // create the index if it is missing
+      createIndexIfMissing(indexName);
 
-            // create the index if it is missing
-            createIndexIfMissing(indexName);
+      // we haven't logged this index yet; record it.
+      this.affectedIndexes.add(indexName);
 
-            // we haven't log this index.
-            this.affectedIndexes.add(indexName);
-
-        }
     }
-
-    protected void disableRefresh() {
-
-        for (String indexName : this.affectedIndexes) {
-            // They are in 'very large bulk' mode we want to turn off refreshing the index.
-            // Create a request then add the setting to tell it to stop refreshing the interval
-            UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indexName);
-            updateSettingsRequest.settings(Settings.settingsBuilder().put("refresh_interval", -1));
-
-            // submit to ElasticSearch
-            this.manager.getClient()
-                    .admin()
-                    .indices()
-                    .updateSettings(updateSettingsRequest)
-                    .actionGet();
-        }
+  }
+
+  protected void disableRefresh() {
+
+    for (String indexName : this.affectedIndexes) {
+      // They are in 'very large bulk' mode we want to turn off refreshing the index.
+      // Create a request then add the setting to tell it to stop refreshing the interval
+      UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indexName);
+      updateSettingsRequest.settings(Settings.settingsBuilder().put("refresh_interval", -1));
+
+      // submit to ElasticSearch
+      this.manager.getClient()
+          .admin()
+          .indices()
+          .updateSettings(updateSettingsRequest)
+          .actionGet();
     }
-
-    public void createIndexIfMissing(String indexName) {
-        // Synchronize this on a static class level
-        if (!this.manager.getClient()
-                .admin()
-                .indices()
-                .exists(new IndicesExistsRequest(indexName))
-                .actionGet()
-                .isExists())
-        {
-            // It does not exist... So we are going to need to create the index.
-            // we are going to assume that the 'templates' that we have loaded into
-            // elasticsearch are sufficient to ensure the index is being created properly.
-            CreateIndexResponse response = this.manager.getClient().admin().indices().create(new CreateIndexRequest(indexName)).actionGet();
-
-            if (response.isAcknowledged()) {
-                LOGGER.info("Index Created: {}", indexName);
-            } else {
-                LOGGER.error("Index {} did not exist. While attempting to create the index from stored ElasticSearch Templates we were unable to get an acknowledgement.", indexName);
-                LOGGER.error("Error Message: {}", response.toString());
-                throw new RuntimeException("Unable to create index " + indexName);
-            }
-        }
+  }
+
+  /**
+   * createIndexIfMissing
+   * @param indexName indexName
+   */
+  public void createIndexIfMissing(String indexName) {
+    // Synchronize this on a static class level
+    if (!this.manager.getClient()
+        .admin()
+        .indices()
+        .exists(new IndicesExistsRequest(indexName))
+        .actionGet()
+        .isExists()) {
+      // It does not exist... So we are going to need to create the index.
+      // we are going to assume that the 'templates' that we have loaded into
+      // elasticsearch are sufficient to ensure the index is being created properly.
+      CreateIndexResponse response = this.manager.getClient().admin().indices().create(new CreateIndexRequest(indexName)).actionGet();
+
+      if (response.isAcknowledged()) {
+        LOGGER.info("Index Created: {}", indexName);
+      } else {
+        LOGGER.error("Index {} did not exist. While attempting to create the index from stored ElasticSearch Templates we were unable to get an acknowledgement.", indexName);
+        LOGGER.error("Error Message: {}", response.toString());
+        throw new RuntimeException("Unable to create index " + indexName);
+      }
     }
-
-    public void prepare(Object configurationObject) {
-        this.veryLargeBulk = config.getBulk() == null ?
-                Boolean.FALSE :
-                config.getBulk();
-
-        this.flushThresholdsRecords = config.getBatchSize() == null ?
-                DEFAULT_BATCH_SIZE :
-                (int)(config.getBatchSize().longValue());
-
-        this.flushThresholdTime = config.getMaxTimeBetweenFlushMs() != null && config.getMaxTimeBetweenFlushMs() > 0 ?
-                config.getMaxTimeBetweenFlushMs() :
-                DEFAULT_MAX_WAIT;
-
-        this.flushThresholdBytes = config.getBatchBytes() == null ?
-                DEFAULT_BULK_FLUSH_THRESHOLD :
-                config.getBatchBytes();
-
-        timer.scheduleAtFixedRate(new TimerTask() {
-            public void run() {
-                checkForFlush();
-            }
-        }, this.flushThresholdTime, this.flushThresholdTime);
-
-        if( veryLargeBulk )
-            disableRefresh();
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    this.veryLargeBulk = config.getBulk() == null
+        ? Boolean.FALSE
+        : config.getBulk();
+
+    this.flushThresholdsRecords = config.getBatchSize() == null
+        ? DEFAULT_BATCH_SIZE
+        : (int)(config.getBatchSize().longValue());
+
+    this.flushThresholdTime = config.getMaxTimeBetweenFlushMs() != null && config.getMaxTimeBetweenFlushMs() > 0
+        ? config.getMaxTimeBetweenFlushMs()
+        : DEFAULT_MAX_WAIT;
+
+    this.flushThresholdBytes = config.getBatchBytes() == null
+        ? DEFAULT_BULK_FLUSH_THRESHOLD
+        : config.getBatchBytes();
+
+    timer.scheduleAtFixedRate(new TimerTask() {
+      public void run() {
+        checkForFlush();
+      }
+    }, this.flushThresholdTime, this.flushThresholdTime);
+
+    if ( veryLargeBulk ) {
+      disableRefresh();
     }
+  }
 
-    private void flush(final BulkRequestBuilder bulkRequest, final Long sent, final Long sizeInBytes) {
-        LOGGER.debug("Writing to ElasticSearch: Items[{}] Size[{} mb]", sent, MEGABYTE_FORMAT.format(sizeInBytes / (double) (1024 * 1024)));
+  private void flush(final BulkRequestBuilder bulkRequest, final Long sent, final Long sizeInBytes) {
+    LOGGER.debug("Writing to ElasticSearch: Items[{}] Size[{} mb]", sent, MEGABYTE_FORMAT.format(sizeInBytes / (double) (1024 * 1024)));
 
 
-        // record the last time we flushed the index
-        this.lastFlush = new Date().getTime();
+    // record the last time we flushed the index
+    this.lastFlush = new Date().getTime();
 
-        // add the totals
-        this.totalSent.addAndGet(sent);
+    // add the totals
+    this.totalSent.addAndGet(sent);
 
-        // add the total number of batches sent
-        this.batchesSent.incrementAndGet();
+    // add the total number of batches sent
+    this.batchesSent.incrementAndGet();
 
-        try {
-            bulkRequest.execute().addListener(new ActionListener<BulkResponse>() {
-                public void onResponse(BulkResponse bulkItemResponses) {
-                    batchesResponded.incrementAndGet();
-                    updateTotals(bulkItemResponses, sent, sizeInBytes);
-                }
-
-                public void onFailure(Throwable throwable) {
-                    batchesResponded.incrementAndGet();
-                    throwable.printStackTrace();
-                }
-            });
-        }
-        catch(Throwable e) {
-            LOGGER.error("There was an error sending the batch: {}", e.getMessage());
+    try {
+      bulkRequest.execute().addListener(new ActionListener<BulkResponse>() {
+        public void onResponse(BulkResponse bulkItemResponses) {
+          batchesResponded.incrementAndGet();
+          updateTotals(bulkItemResponses, sent, sizeInBytes);
         }
-    }
 
-    private void updateTotals(final BulkResponse bulkItemResponses, final Long sent, final Long sizeInBytes) {
-        long failed = 0;
-        long passed = 0;
-        long millis = bulkItemResponses.getTookInMillis();
-
-        // keep track of the number of totalFailed and items that we have totalOk.
-        for (BulkItemResponse resp : bulkItemResponses.getItems()) {
-            if (resp == null || resp.isFailed()) {
-                failed++;
-                LOGGER.debug("{} ({},{},{}) failed: {}", resp.getOpType(), resp.getIndex(), resp.getType(), resp.getId(), resp.getFailureMessage());
-            }
-            else
-                passed++;
+        public void onFailure(Throwable throwable) {
+          batchesResponded.incrementAndGet();
+          throwable.printStackTrace();
         }
+      });
+    } catch (Throwable ex) {
+      LOGGER.error("There was an error sending the batch: {}", ex.getMessage());
+    }
+  }
+
+  private void updateTotals(final BulkResponse bulkItemResponses, final Long sent, final Long sizeInBytes) {
+    long failed = 0;
+    long passed = 0;
+    long millis = bulkItemResponses.getTookInMillis();
+
+    // keep track of the number of totalFailed and items that we have totalOk.
+    for (BulkItemResponse resp : bulkItemResponses.getItems()) {
+      if (resp == null || resp.isFailed()) {
+        failed++;
+        LOGGER.debug("{} ({},{},{}) failed: {}", resp.getOpType(), resp.getIndex(), resp.getType(), resp.getId(), resp.getFailureMessage());
+      } else {
+        passed++;
+      }
+    }
 
-        if (failed > 0)
-            LOGGER.warn("Bulk Uploading had {} failures of {}", failed, sent);
-
-        this.totalOk.addAndGet(passed);
-        this.totalFailed.addAndGet(failed);
-        this.totalSeconds.addAndGet(millis / 1000);
-        this.totalSizeInBytes.addAndGet(sizeInBytes);
+    if (failed > 0) {
+      LOGGER.warn("Bulk Uploading had {} failures of {}", failed, sent);
+    }
 
-        if (sent != (passed + failed))
-            LOGGER.error("Count MisMatch: Sent[{}] Passed[{}] Failed[{}]", sent, passed, failed);
+    this.totalOk.addAndGet(passed);
+    this.totalFailed.addAndGet(failed);
+    this.totalSeconds.addAndGet(millis / 1000);
+    this.totalSizeInBytes.addAndGet(sizeInBytes);
 
-        LOGGER.debug("Batch[{}mb {} items with {} failures in {}ms] - Total[{}mb {} items with {} failures in {}seconds] {} outstanding]",
-                MEGABYTE_FORMAT.format(sizeInBytes / (double) (1024 * 1024)), NUMBER_FORMAT.format(passed), NUMBER_FORMAT.format(failed), NUMBER_FORMAT.format(millis),
-                MEGABYTE_FORMAT.format((double) totalSizeInBytes.get() / (double) (1024 * 1024)), NUMBER_FORMAT.format(totalOk), NUMBER_FORMAT.format(totalFailed), NUMBER_FORMAT.format(totalSeconds), NUMBER_FORMAT.format(getTotalOutstanding()));
+    if (sent != (passed + failed)) {
+      LOGGER.error("Count MisMatch: Sent[{}] Passed[{}] Failed[{}]", sent, passed, failed);
     }
 
+    LOGGER.debug("Batch[{}mb {} items with {} failures in {}ms] - Total[{}mb {} items with {} failures in {}seconds] {} outstanding]",
+        MEGABYTE_FORMAT.format(sizeInBytes / (double) (1024 * 1024)), NUMBER_FORMAT.format(passed), NUMBER_FORMAT.format(failed), NUMBER_FORMAT.format(millis),
+        MEGABYTE_FORMAT.format((double) totalSizeInBytes.get() / (double) (1024 * 1024)), NUMBER_FORMAT.format(totalOk), NUMBER_FORMAT.format(totalFailed), NUMBER_FORMAT.format(totalSeconds), NUMBER_FORMAT.format(getTotalOutstanding()));
+  }
+
 }


[23/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/RssStreamProviderTaskIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/RssStreamProviderTaskIT.java b/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/RssStreamProviderTaskIT.java
index cb71c90..6722f3c 100644
--- a/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/RssStreamProviderTaskIT.java
+++ b/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/RssStreamProviderTaskIT.java
@@ -15,13 +15,14 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.rss.provider;
 
 import org.apache.streams.core.StreamsDatum;
+
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import java.net.URL;
@@ -37,111 +38,113 @@ import static org.junit.Assert.assertTrue;
  */
 public class RssStreamProviderTaskIT {
 
-
-    @Before
-    public void clearPreviouslySeen() {
-        //some test runners run in parallel so needs to be synchronized
-        //if tests are run in parallel test will have undetermined results.
-        synchronized (RssStreamProviderTask.PREVIOUSLY_SEEN) {
-            RssStreamProviderTask.PREVIOUSLY_SEEN.clear();
-        }
+  /**
+   * clearPreviouslySeen.
+   */
+  @Before
+  public void clearPreviouslySeen() {
+    //some test runners run in parallel so needs to be synchronized
+    //if tests are run in parallel test will have undetermined results.
+    synchronized (RssStreamProviderTask.PREVIOUSLY_SEEN) {
+      RssStreamProviderTask.PREVIOUSLY_SEEN.clear();
     }
+  }
 
-    /**
-     * Test that a task can read a valid rss from a url and queue the data
-     * @throws Exception
-     */
-    @Test
-    public void testNonPerpetualNoTimeFramePull() throws Exception {
-        com.healthmarketscience.common.util.resource.Handler.init();
-        BlockingQueue<StreamsDatum> queue = new LinkedBlockingQueue<>();
-        RssStreamProviderTask task = new RssStreamProviderTask(queue, "fake url");
-        Set<String> batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
-        assertEquals("Expected batch size to be the same as amount of queued datums", batch.size(), queue.size());
-        RssStreamProviderTask.PREVIOUSLY_SEEN.put("fake url", batch);
-        //Test that  it will out previously seen articles
-        queue.clear();
-        batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
-        assertEquals("Expected batch size to be the same as amount of queued datums", batch.size(), queue.size());
-    }
+  /**
+   * Test that a task can read a valid rss from a url and queue the data.
+   * @throws Exception Exception
+   */
+  @Test
+  public void testNonPerpetualNoTimeFramePull() throws Exception {
+    com.healthmarketscience.common.util.resource.Handler.init();
+    BlockingQueue<StreamsDatum> queue = new LinkedBlockingQueue<>();
+    RssStreamProviderTask task = new RssStreamProviderTask(queue, "fake url");
+    Set<String> batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
+    assertEquals("Expected batch size to be the same as amount of queued datums", batch.size(), queue.size());
+    RssStreamProviderTask.PREVIOUSLY_SEEN.put("fake url", batch);
+    //Test that  it will out previously seen articles
+    queue.clear();
+    batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
+    assertEquals("Expected batch size to be the same as amount of queued datums", batch.size(), queue.size());
+  }
 
-    /**
-     * Test that perpetual streams will not output previously seen articles
-     * @throws Exception
-     */
-    @Test
-    public void testPerpetualNoTimeFramePull() throws Exception {
-        com.healthmarketscience.common.util.resource.Handler.init();
-        BlockingQueue<StreamsDatum> queue = new LinkedBlockingQueue<>();
-        RssStreamProviderTask task = new RssStreamProviderTask(queue, "fake url", new DateTime().minusYears(5), 10000, true);
-        Set<String> batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
-        assertEquals("Expected batch size to be the same as amount of queued datums", batch.size(), queue.size());
-        RssStreamProviderTask.PREVIOUSLY_SEEN.put("fake url", batch);
-        //Test that it will not out previously seen articles
-        queue.clear();
-        batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
-        assertEquals("Expected queue size to be 0", 0, queue.size());
-        assertEquals("Expected batch size to be 20", 20, batch.size());
-        RssStreamProviderTask.PREVIOUSLY_SEEN.put("fake url", batch);
-        //Test that not seen urls aren't blocked.
-        queue.clear();
-        batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist2.xml"));
-        assertEquals(batch.size(), queue.size());
-        assertEquals("Expected queue size to be 25", 25, queue.size());
-        assertEquals("Expected batch size to be 25", 25, batch.size());
-    }
+  /**
+   * Test that perpetual streams will not output previously seen articles.
+   * @throws Exception Exception
+   */
+  @Test
+  public void testPerpetualNoTimeFramePull() throws Exception {
+    com.healthmarketscience.common.util.resource.Handler.init();
+    BlockingQueue<StreamsDatum> queue = new LinkedBlockingQueue<>();
+    RssStreamProviderTask task = new RssStreamProviderTask(queue, "fake url", new DateTime().minusYears(5), 10000, true);
+    Set<String> batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
+    assertEquals("Expected batch size to be the same as amount of queued datums", batch.size(), queue.size());
+    RssStreamProviderTask.PREVIOUSLY_SEEN.put("fake url", batch);
+    //Test that it will not out previously seen articles
+    queue.clear();
+    batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
+    assertEquals("Expected queue size to be 0", 0, queue.size());
+    assertEquals("Expected batch size to be 20", 20, batch.size());
+    RssStreamProviderTask.PREVIOUSLY_SEEN.put("fake url", batch);
+    //Test that not seen urls aren't blocked.
+    queue.clear();
+    batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist2.xml"));
+    assertEquals(batch.size(), queue.size());
+    assertEquals("Expected queue size to be 25", 25, queue.size());
+    assertEquals("Expected batch size to be 25", 25, batch.size());
+  }
 
-    /**
-     * Test that you can task will only output aritcles after a certain published time
-     * @throws Exception
-     */
-    @Test
-    public void testNonPerpetualTimeFramedPull() throws Exception{
-        com.healthmarketscience.common.util.resource.Handler.init();
-        BlockingQueue<StreamsDatum> queue = new LinkedBlockingQueue<>();
-        DateTime publishedSince = new DateTime().withYear(2014).withDayOfMonth(5).withMonthOfYear(9).withZone(DateTimeZone.UTC);
-        RssStreamProviderTask task = new RssStreamProviderTask(queue, "fake url", publishedSince, 10000, false);
-        Set<String> batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
-        assertEquals( 15, queue.size());
-        assertEquals( 20 , batch.size());
-        assertTrue( queue.size() < batch.size());
-        RssStreamProviderTask.PREVIOUSLY_SEEN.put("fake url", batch);
-        //Test that  it will out previously seen articles
-        queue.clear();
-        batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
-        assertEquals( 15, queue.size());
-        assertEquals( 20 , batch.size());
-        assertTrue( queue.size() < batch.size());
-    }
+  /**
+   * Test that the task will only output articles after a certain published time.
+   * @throws Exception Exception
+   */
+  @Test
+  public void testNonPerpetualTimeFramedPull() throws Exception {
+    com.healthmarketscience.common.util.resource.Handler.init();
+    BlockingQueue<StreamsDatum> queue = new LinkedBlockingQueue<>();
+    DateTime publishedSince = new DateTime().withYear(2014).withDayOfMonth(5).withMonthOfYear(9).withZone(DateTimeZone.UTC);
+    RssStreamProviderTask task = new RssStreamProviderTask(queue, "fake url", publishedSince, 10000, false);
+    Set<String> batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
+    assertEquals( 15, queue.size());
+    assertEquals( 20 , batch.size());
+    assertTrue( queue.size() < batch.size());
+    RssStreamProviderTask.PREVIOUSLY_SEEN.put("fake url", batch);
+    //Test that  it will out previously seen articles
+    queue.clear();
+    batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
+    assertEquals( 15, queue.size());
+    assertEquals( 20 , batch.size());
+    assertTrue( queue.size() < batch.size());
+  }
 
-    /**
-     * Test that task will only output articles after a certain published time that it has not seen before.
-     * @throws Exception
-     */
-    @Test
-    public void testPerpetualTimeFramedPull() throws Exception {
-        com.healthmarketscience.common.util.resource.Handler.init();
-        BlockingQueue<StreamsDatum> queue = new LinkedBlockingQueue<>();
-        DateTime publishedSince = new DateTime().withYear(2014).withDayOfMonth(5).withMonthOfYear(9).withZone(DateTimeZone.UTC);
-        RssStreamProviderTask task = new RssStreamProviderTask(queue, "fake url", publishedSince, 10000, true);
-        Set<String> batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
-        assertEquals( 15, queue.size());
-        assertEquals( 20 , batch.size());
-        assertTrue( queue.size() < batch.size());
-        RssStreamProviderTask.PREVIOUSLY_SEEN.put("fake url", batch);
-        //Test that  it will not out put previously seen articles
-        queue.clear();
-        batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
-        assertEquals( 0, queue.size());
-        assertEquals( 20 , batch.size());
-        assertTrue( queue.size() < batch.size());
-        RssStreamProviderTask.PREVIOUSLY_SEEN.put("fake url", batch);
-
-        batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist2.xml"));
-        assertTrue( queue.size() < batch.size());
-        assertEquals("Expected queue size to be 0", 3, queue.size());
-        assertEquals("Expected batch size to be 0", 25, batch.size());
-    }
+  /**
+   * Test that task will only output articles after a certain published time that it has not seen before.
+   * @throws Exception Exception
+   */
+  @Test
+  public void testPerpetualTimeFramedPull() throws Exception {
+    com.healthmarketscience.common.util.resource.Handler.init();
+    BlockingQueue<StreamsDatum> queue = new LinkedBlockingQueue<>();
+    DateTime publishedSince = new DateTime().withYear(2014).withDayOfMonth(5).withMonthOfYear(9).withZone(DateTimeZone.UTC);
+    RssStreamProviderTask task = new RssStreamProviderTask(queue, "fake url", publishedSince, 10000, true);
+    Set<String> batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
+    assertEquals( 15, queue.size());
+    assertEquals( 20 , batch.size());
+    assertTrue( queue.size() < batch.size());
+    RssStreamProviderTask.PREVIOUSLY_SEEN.put("fake url", batch);
+    //Test that  it will not out put previously seen articles
+    queue.clear();
+    batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist1.xml"));
+    assertEquals( 0, queue.size());
+    assertEquals( 20 , batch.size());
+    assertTrue( queue.size() < batch.size());
+    RssStreamProviderTask.PREVIOUSLY_SEEN.put("fake url", batch);
+
+    batch = task.queueFeedEntries(new URL("resource:///test_rss_xml/economist2.xml"));
+    assertTrue( queue.size() < batch.size());
+    assertEquals("Expected queue size to be 0", 3, queue.size());
+    assertEquals("Expected batch size to be 0", 25, batch.size());
+  }
 
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/RssStreamProviderTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/RssStreamProviderTest.java b/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/RssStreamProviderTest.java
index 60b8e0f..08a58d3 100644
--- a/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/RssStreamProviderTest.java
+++ b/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/RssStreamProviderTest.java
@@ -18,12 +18,13 @@
 
 package org.apache.streams.rss.provider;
 
-import com.carrotsearch.randomizedtesting.RandomizedTest;
-import com.google.common.collect.Queues;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsResultSet;
 import org.apache.streams.rss.RssStreamConfiguration;
 import org.apache.streams.rss.provider.perpetual.RssFeedScheduler;
+
+import com.carrotsearch.randomizedtesting.RandomizedTest;
+import com.google.common.collect.Queues;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -36,85 +37,87 @@ import java.util.concurrent.CountDownLatch;
  */
 public class RssStreamProviderTest extends RandomizedTest {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(RssStreamProviderTest.class);
-
-    @Test
-    public void testRssFeedShutdownsNonPerpetual() throws Exception {
-        RssStreamProvider provider = null;
-        try {
-            final CountDownLatch latch = new CountDownLatch(1);
-            BlockingQueue<StreamsDatum> datums = Queues.newLinkedBlockingQueue();
-            provider = new RssStreamProvider(new RssStreamConfiguration()) {
-                @Override
-                protected RssFeedScheduler getScheduler(BlockingQueue<StreamsDatum> queue) {
-                    return new MockScheduler(latch, queue);
-                }
-            };
-            provider.prepare(null);
-            int datumCount = 0;
-            provider.startStream();
-            while (!provider.scheduler.isComplete()) {
-                StreamsResultSet batch = provider.readCurrent();
-                LOGGER.debug("Batch size : {}", batch.size());
-                datumCount += batch.size();
-                Thread.sleep(randomIntBetween(0, 3000));
-            }
-            latch.await();
-
-            //one last pull incase of race condition
-            StreamsResultSet batch = provider.readCurrent();
-            LOGGER.debug("Batch size : {}", batch.size());
-            datumCount += batch.size();
-            if(batch.size() != 0) { //if race condition happened, pull again
-                batch = provider.readCurrent();
-                assertEquals(0, batch.size());
-            }
-
-            assertTrue(provider.scheduler.isComplete());
-            assertEquals(20, datumCount);
-            assertFalse(provider.isRunning());
-            assertEquals(0, datums.size());
-            provider.cleanUp();
-        } finally {
-            if(provider != null)
-                provider.cleanUp();
+  private static final Logger LOGGER = LoggerFactory.getLogger(RssStreamProviderTest.class);
+
+  @Test
+  public void testRssFeedShutdownsNonPerpetual() throws Exception {
+    RssStreamProvider provider = null;
+    try {
+      final CountDownLatch latch = new CountDownLatch(1);
+      BlockingQueue<StreamsDatum> datums = Queues.newLinkedBlockingQueue();
+      provider = new RssStreamProvider(new RssStreamConfiguration()) {
+        @Override
+        protected RssFeedScheduler getScheduler(BlockingQueue<StreamsDatum> queue) {
+          return new MockScheduler(latch, queue);
         }
+      };
+      provider.prepare(null);
+      int datumCount = 0;
+      provider.startStream();
+      while (!provider.scheduler.isComplete()) {
+        StreamsResultSet batch = provider.readCurrent();
+        LOGGER.debug("Batch size : {}", batch.size());
+        datumCount += batch.size();
+        Thread.sleep(randomIntBetween(0, 3000));
+      }
+      latch.await();
+
+      //one last pull in case of race condition
+      StreamsResultSet batch = provider.readCurrent();
+      LOGGER.debug("Batch size : {}", batch.size());
+      datumCount += batch.size();
+      if (batch.size() != 0) {
+        //if race condition happened, pull again
+        batch = provider.readCurrent();
+        assertEquals(0, batch.size());
+      }
+
+      assertTrue(provider.scheduler.isComplete());
+      assertEquals(20, datumCount);
+      assertFalse(provider.isRunning());
+      assertEquals(0, datums.size());
+      provider.cleanUp();
+    } finally {
+      if (provider != null) {
+        provider.cleanUp();
+      }
     }
+  }
 
 
-    private class MockScheduler extends RssFeedScheduler {
+  private class MockScheduler extends RssFeedScheduler {
 
-        private BlockingQueue<StreamsDatum> queue;
-        private CountDownLatch latch;
-        private volatile boolean complete = false;
+    private BlockingQueue<StreamsDatum> queue;
+    private CountDownLatch latch;
+    private volatile boolean complete = false;
 
-        public MockScheduler(CountDownLatch latch, BlockingQueue<StreamsDatum> dataQueue) {
-            super(null, null, dataQueue);
-            this.latch = latch;
-            this.queue = dataQueue;
-        }
+    public MockScheduler(CountDownLatch latch, BlockingQueue<StreamsDatum> dataQueue) {
+      super(null, null, dataQueue);
+      this.latch = latch;
+      this.queue = dataQueue;
+    }
 
-        @Override
-        public void run() {
-            try {
-                for (int i = 0; i < 20; ++i) {
-                    this.queue.put(new StreamsDatum(null));
-                    Thread.sleep(randomIntBetween(0, 5000));
-                }
-            } catch (InterruptedException ie) {
-                Thread.currentThread().interrupt();
-            } finally {
-                this.complete = true;
-                this.latch.countDown();
-            }
+    @Override
+    public void run() {
+      try {
+        for (int i = 0; i < 20; ++i) {
+          this.queue.put(new StreamsDatum(null));
+          Thread.sleep(randomIntBetween(0, 5000));
         }
+      } catch (InterruptedException ie) {
+        Thread.currentThread().interrupt();
+      } finally {
+        this.complete = true;
+        this.latch.countDown();
+      }
+    }
 
 
-        @Override
-        public boolean isComplete() {
-            return this.complete;
-        }
+    @Override
+    public boolean isComplete() {
+      return this.complete;
     }
+  }
 }
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/perpetual/RssFeedSchedulerTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/perpetual/RssFeedSchedulerTest.java b/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/perpetual/RssFeedSchedulerTest.java
index 2bd0b69..830f0e7 100644
--- a/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/perpetual/RssFeedSchedulerTest.java
+++ b/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/provider/perpetual/RssFeedSchedulerTest.java
@@ -15,12 +15,14 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.rss.provider.perpetual;
 
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.rss.FeedDetails;
 import org.apache.streams.rss.provider.RssStreamProviderTask;
+
+import com.google.common.collect.Lists;
 import org.junit.Test;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
@@ -34,7 +36,6 @@ import static org.junit.Assert.assertEquals;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 /**
  * Unit tests for {@link org.apache.streams.rss.provider.perpetual.RssFeedScheduler}
@@ -42,60 +43,60 @@ import static org.mockito.Mockito.when;
 public class RssFeedSchedulerTest {
 
 
-    /**
-     * Test that feeds are scheduled based on elapsed time correctly.
-     * Takes 1 minute to run.
-     */
-    @Test
-    public void testScheduleFeeds() {
-        ExecutorService mockService = mock(ExecutorService.class);
-        final List<String> queuedTasks = new ArrayList<>(5);
-        doAnswer(new Answer() {
-            @Override
-            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
-                queuedTasks.add(((RssStreamProviderTask) invocationOnMock.getArguments()[0]).getRssFeed());
-                return null;
-            }
-        }).when(mockService).execute(any(Runnable.class));
+  /**
+   * Test that feeds are scheduled based on elapsed time correctly.
+   * Takes 1 minute to run.
+   */
+  @Test
+  public void testScheduleFeeds() {
+    ExecutorService mockService = mock(ExecutorService.class);
+    final List<String> queuedTasks = new ArrayList<>(5);
+    doAnswer(new Answer() {
+      @Override
+      public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
+        queuedTasks.add(((RssStreamProviderTask) invocationOnMock.getArguments()[0]).getRssFeed());
+        return null;
+      }
+    }).when(mockService).execute(any(Runnable.class));
 
-        RssFeedScheduler scheduler = new RssFeedScheduler(mockService, createFeedList(), new LinkedBlockingQueue<StreamsDatum>(), 1);
-        scheduler.scheduleFeeds();
-        assertEquals("Expected 2 Feeds to be scheduled", 2, queuedTasks.size());
-        assertEquals("Expected Feed 1 to be queued first",  "1", queuedTasks.get(0));
-        assertEquals("Expected Feed 2 to be queued second", "2", queuedTasks.get(1));
+    RssFeedScheduler scheduler = new RssFeedScheduler(mockService, createFeedList(), new LinkedBlockingQueue<StreamsDatum>(), 1);
+    scheduler.scheduleFeeds();
+    assertEquals("Expected 2 Feeds to be scheduled", 2, queuedTasks.size());
+    assertEquals("Expected Feed 1 to be queued first",  "1", queuedTasks.get(0));
+    assertEquals("Expected Feed 2 to be queued second", "2", queuedTasks.get(1));
 
-        safeSleep(1);
-        scheduler.scheduleFeeds();
-        assertEquals("Only feed 1 should have been re-queued", 3, queuedTasks.size());
-        assertEquals("Only feed 1 should have been re-queued", "1", queuedTasks.get(2));
+    safeSleep(1);
+    scheduler.scheduleFeeds();
+    assertEquals("Only feed 1 should have been re-queued", 3, queuedTasks.size());
+    assertEquals("Only feed 1 should have been re-queued", "1", queuedTasks.get(2));
 
-        safeSleep(60 * 1000);
-        scheduler.scheduleFeeds();
-        assertEquals("Both feeds should have been re-queued", 5, queuedTasks.size());
-        assertEquals("1", queuedTasks.get(3));
-        assertEquals("2", queuedTasks.get(4));
-    }
+    safeSleep(60 * 1000);
+    scheduler.scheduleFeeds();
+    assertEquals("Both feeds should have been re-queued", 5, queuedTasks.size());
+    assertEquals("1", queuedTasks.get(3));
+    assertEquals("2", queuedTasks.get(4));
+  }
 
-    private List<FeedDetails> createFeedList() {
-        List<FeedDetails> list = Lists.newLinkedList();
-        FeedDetails fd = new FeedDetails();
-        fd.setPollIntervalMillis(1L);
-        fd.setUrl("1");
-        list.add(fd);
+  private List<FeedDetails> createFeedList() {
+    List<FeedDetails> list = Lists.newLinkedList();
+    FeedDetails fd = new FeedDetails();
+    fd.setPollIntervalMillis(1L);
+    fd.setUrl("1");
+    list.add(fd);
 
-        fd = new FeedDetails();
-        fd.setPollIntervalMillis( 60L * 1000);
-        fd.setUrl("2");
-        list.add(fd);
-        return list;
-    }
+    fd = new FeedDetails();
+    fd.setPollIntervalMillis( 60L * 1000);
+    fd.setUrl("2");
+    list.add(fd);
+    return list;
+  }
 
-    private void safeSleep(long milliseconds) {
-        try {
-            Thread.sleep(milliseconds);
-        } catch (InterruptedException ie) {
-            Thread.currentThread().interrupt();
-        }
+  private void safeSleep(long milliseconds) {
+    try {
+      Thread.sleep(milliseconds);
+    } catch (InterruptedException ie) {
+      Thread.currentThread().interrupt();
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/RssStreamProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/RssStreamProviderIT.java b/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/RssStreamProviderIT.java
index ccac8aa..ccd8b74 100644
--- a/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/RssStreamProviderIT.java
+++ b/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/RssStreamProviderIT.java
@@ -18,17 +18,16 @@
 
 package org.apache.streams.rss.test;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.rss.FeedDetails;
+import org.apache.streams.rss.RssStreamConfiguration;
+import org.apache.streams.rss.provider.RssStreamProvider;
+
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.commons.lang.StringUtils;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.rss.FeedDetails;
-import org.apache.streams.rss.RssStreamConfiguration;
-import org.apache.streams.rss.provider.RssStreamProvider;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -53,69 +52,68 @@ import static org.hamcrest.number.OrderingComparison.greaterThan;
  */
 public class RssStreamProviderIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(RssStreamProviderIT.class);
-
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    @Test
-    public void testRssStreamProvider() throws Exception {
-
-        String configfile = "./target/test-classes/RssStreamProviderIT.conf";
-        String outfile = "./target/test-classes/RssStreamProviderIT.stdout.txt";
-
-        InputStream is = RssStreamProviderIT.class.getResourceAsStream("/top100.txt");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
-
-        RssStreamConfiguration configuration = new RssStreamConfiguration();
-        List<FeedDetails> feedArray = Lists.newArrayList();
-        try {
-            while (br.ready()) {
-                String line = br.readLine();
-                if(!StringUtils.isEmpty(line))
-                {
-                    feedArray.add(new FeedDetails().withUrl(line).withPollIntervalMillis(5000l));
-                }
-            }
-            configuration.setFeeds(feedArray);
-        } catch( Exception e ) {
-            System.out.println(e);
-            e.printStackTrace();
-            Assert.fail();
+  private static final Logger LOGGER = LoggerFactory.getLogger(RssStreamProviderIT.class);
+
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+  @Test
+  public void testRssStreamProvider() throws Exception {
+
+    final String configfile = "./target/test-classes/RssStreamProviderIT.conf";
+    final String outfile = "./target/test-classes/RssStreamProviderIT.stdout.txt";
+
+    InputStream is = RssStreamProviderIT.class.getResourceAsStream("/top100.txt");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
+
+    RssStreamConfiguration configuration = new RssStreamConfiguration();
+    List<FeedDetails> feedArray = Lists.newArrayList();
+    try {
+      while (br.ready()) {
+        String line = br.readLine();
+        if (!StringUtils.isEmpty(line)) {
+          feedArray.add(new FeedDetails().withUrl(line).withPollIntervalMillis(5000L));
         }
+      }
+      configuration.setFeeds(feedArray);
+    } catch ( Exception ex ) {
+      System.out.println(ex);
+      ex.printStackTrace();
+      Assert.fail();
+    }
 
-        Assert.assertThat(configuration.getFeeds().size(), greaterThan(70));
+    Assert.assertThat(configuration.getFeeds().size(), greaterThan(70));
 
-        OutputStream os = new FileOutputStream(configfile);
-        OutputStreamWriter osw = new OutputStreamWriter(os);
-        BufferedWriter bw = new BufferedWriter(osw);
+    OutputStream os = new FileOutputStream(configfile);
+    OutputStreamWriter osw = new OutputStreamWriter(os);
+    BufferedWriter bw = new BufferedWriter(osw);
 
-        // write conf
-        ObjectNode feedsNode = mapper.convertValue(configuration, ObjectNode.class);
-        JsonNode configNode = mapper.createObjectNode().set("rss", feedsNode);
+    // write conf
+    ObjectNode feedsNode = mapper.convertValue(configuration, ObjectNode.class);
+    JsonNode configNode = mapper.createObjectNode().set("rss", feedsNode);
 
-        bw.write(mapper.writeValueAsString(configNode));
-        bw.flush();
-        bw.close();
+    bw.write(mapper.writeValueAsString(configNode));
+    bw.flush();
+    bw.close();
 
-        File config = new File(configfile);
-        assert (config.exists());
-        assert (config.canRead());
-        assert (config.isFile());
+    File config = new File(configfile);
+    assert (config.exists());
+    assert (config.canRead());
+    assert (config.isFile());
 
-        RssStreamProvider.main(Lists.newArrayList(configfile, outfile).toArray(new String[2]));
+    RssStreamProvider.main(Lists.newArrayList(configfile, outfile).toArray(new String[2]));
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while (outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() >= 200);
+    assert (outCounter.getLineNumber() >= 200);
 
-    }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/RssTypeConverterTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/RssTypeConverterTest.java b/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/RssTypeConverterTest.java
index 9def7ac..37833c5 100644
--- a/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/RssTypeConverterTest.java
+++ b/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/RssTypeConverterTest.java
@@ -18,17 +18,18 @@
 
 package org.apache.streams.rss.test;
 
-import org.apache.commons.lang3.SerializationUtils;
 import org.apache.streams.rss.processor.RssTypeConverter;
+
+import org.apache.commons.lang3.SerializationUtils;
 import org.junit.Test;
 
 /**
  * Tests Serializability of {@link org.apache.streams.rss.processor.RssTypeConverter}
  */
 public class RssTypeConverterTest {
-    @Test
-    public void testSerializability() {
-        RssTypeConverter converter = new RssTypeConverter();
-        RssTypeConverter clone = SerializationUtils.clone(converter);
-    }
+  @Test
+  public void testSerializability() {
+    RssTypeConverter converter = new RssTypeConverter();
+    RssTypeConverter clone = SerializationUtils.clone(converter);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/SyndEntryActivitySerializerIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/SyndEntryActivitySerializerIT.java b/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/SyndEntryActivitySerializerIT.java
index b1d5f9d..01f1999 100644
--- a/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/SyndEntryActivitySerializerIT.java
+++ b/streams-contrib/streams-provider-rss/src/test/java/org/apache/streams/rss/test/SyndEntryActivitySerializerIT.java
@@ -18,15 +18,16 @@
 
 package org.apache.streams.rss.test;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Lists;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Author;
 import org.apache.streams.pojo.json.Provider;
 import org.apache.streams.rss.serializer.SyndEntryActivitySerializer;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.collect.Lists;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.junit.Test;
@@ -37,88 +38,94 @@ import java.net.URL;
 import java.util.List;
 import java.util.Scanner;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Tests ability to convert SyndEntry ObjectNode form to {@link org.apache.streams.rss.processor.RssTypeConverter} form
  */
 public class SyndEntryActivitySerializerIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(SyndEntryActivitySerializerIT.class);
-
-    private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    @Test
-    public void testJsonData() throws Exception {
-        Scanner scanner = new Scanner(this.getClass().getResourceAsStream("/TestSyndEntryJson.txt"));
-        List<Activity> activities = Lists.newLinkedList();
-        List<ObjectNode> objects = Lists.newLinkedList();
-
-        SyndEntryActivitySerializer serializer = new SyndEntryActivitySerializer();
+  private static final Logger LOGGER = LoggerFactory.getLogger(SyndEntryActivitySerializerIT.class);
 
-        while(scanner.hasNext()) {
-            String line = scanner.nextLine();
-            LOGGER.debug(line);
-            ObjectNode node = (ObjectNode) mapper.readTree(line);
+  private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-            objects.add(node);
-            activities.add(serializer.deserialize(node));
-        }
+  @Test
+  public void testJsonData() throws Exception {
+    Scanner scanner = new Scanner(this.getClass().getResourceAsStream("/TestSyndEntryJson.txt"));
+    List<Activity> activities = Lists.newLinkedList();
+    List<ObjectNode> objects = Lists.newLinkedList();
 
-        assertEquals(11, activities.size());
+    SyndEntryActivitySerializer serializer = new SyndEntryActivitySerializer();
 
-        for(int x = 0; x < activities.size(); x ++) {
-            ObjectNode n = objects.get(x);
-            Activity a = activities.get(x);
+    while (scanner.hasNext()) {
+      String line = scanner.nextLine();
+      LOGGER.debug(line);
+      ObjectNode node = (ObjectNode) mapper.readTree(line);
 
-            testActor(n.get("author").asText(), a.getActor());
-            testAuthor(n.get("author").asText(), a.getObject().getAuthor());
-            testProvider("id:providers:rss", "RSS", a.getProvider());
-            testProviderUrl(a.getProvider());
-            testVerb("post", a.getVerb());
-            testPublished(n.get("publishedDate").asText(), a.getPublished());
-            testUrl(n.get("uri").asText(), n.get("link").asText(), a);
-        }
+      objects.add(node);
+      activities.add(serializer.deserialize(node));
     }
 
-    public void testVerb(String expected, String verb) {
-        assertEquals(expected, verb);
-    }
+    assertEquals(11, activities.size());
 
-    public void testPublished(String expected, DateTime published) {
-        assertEquals(new DateTime(expected, DateTimeZone.UTC), published);
-    }
+    for (int x = 0; x < activities.size(); x++) {
+      ObjectNode objectNode = objects.get(x);
+      Activity activity = activities.get(x);
 
-    public void testActor(String expected, ActivityObject actor) {
-        assertEquals("id:rss:null" + ":" + expected, actor.getId());
-        assertEquals(expected, actor.getDisplayName());
+      testActor(objectNode.get("author").asText(), activity.getActor());
+      testAuthor(objectNode.get("author").asText(), activity.getObject().getAuthor());
+      testProvider("id:providers:rss", "RSS", activity.getProvider());
+      validateProviderUrl(activity.getProvider());
+      testVerb("post", activity.getVerb());
+      testPublished(objectNode.get("publishedDate").asText(), activity.getPublished());
+      testUrl(objectNode.get("uri").asText(), objectNode.get("link").asText(), activity);
     }
-
-    public void testAuthor(String expected, Author author) {
-        assertEquals(expected, author.getDisplayName());
-        assertEquals(expected, author.getId());
-    }
-
-    public void testProvider(String expectedId, String expectedDisplay, Provider provider) {
-        assertEquals(expectedId, provider.getId());
-        assertEquals(expectedDisplay, provider.getDisplayName());
+  }
+
+  public void testVerb(String expected, String verb) {
+    assertEquals(expected, verb);
+  }
+
+  public void testPublished(String expected, DateTime published) {
+    assertEquals(new DateTime(expected, DateTimeZone.UTC), published);
+  }
+
+  public void testActor(String expected, ActivityObject actor) {
+    assertEquals("id:rss:null" + ":" + expected, actor.getId());
+    assertEquals(expected, actor.getDisplayName());
+  }
+
+  public void testAuthor(String expected, Author author) {
+    assertEquals(expected, author.getDisplayName());
+    assertEquals(expected, author.getId());
+  }
+
+  public void testProvider(String expectedId, String expectedDisplay, Provider provider) {
+    assertEquals(expectedId, provider.getId());
+    assertEquals(expectedDisplay, provider.getDisplayName());
+  }
+
+  /**
+   * validate Provider Url.
+   * @param provider Provider
+   */
+  public void validateProviderUrl(Provider provider) {
+    URL url = null;
+
+    try {
+      url = new URL(provider.getUrl());
+      url.toURI();
+    } catch (Exception ex) {
+      LOGGER.error("Threw an exception while trying to validate URL: {} - {}", provider.getUrl(), ex);
     }
 
-    public void testProviderUrl(Provider provider) {
-        URL url = null;
+    assertNotNull(url);
+  }
 
-        try {
-            url = new URL(provider.getUrl());
-            url.toURI();
-        } catch(Exception e) {
-            LOGGER.error("Threw an exception while trying to validate URL: {} - {}", provider.getUrl(), e);
-        }
-
-        assertNotNull(url);
-    }
-
-    public void testUrl(String expectedURI, String expectedLink, Activity activity) {
-        assertTrue((expectedURI == activity.getUrl() || expectedLink == activity.getUrl()));
-        assertTrue((expectedURI == activity.getObject().getUrl() || expectedLink == activity.getObject().getUrl()));
-    }
+  public void testUrl(String expectedUri, String expectedLink, Activity activity) {
+    assertTrue((expectedUri == activity.getUrl() || expectedLink == activity.getUrl()));
+    assertTrue((expectedUri == activity.getObject().getUrl() || expectedLink == activity.getObject().getUrl()));
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/SysomosException.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/SysomosException.java b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/SysomosException.java
index a38e267..1480308 100644
--- a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/SysomosException.java
+++ b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/SysomosException.java
@@ -20,44 +20,44 @@
 package org.apache.streams.sysomos;
 
 /**
- * Runtime exception wrapper for Sysomos Errors
+ * Runtime exception wrapper for Sysomos Errors.
  */
 public class SysomosException extends RuntimeException {
 
-    private int errorCode = -1;
+  private int errorCode = -1;
 
-    public SysomosException() {
-        // TODO Auto-generated constructor stub
-    }
+  public SysomosException() {
+    // TODO Auto-generated constructor stub
+  }
 
-    public SysomosException(String arg0) {
-        super(arg0);
-        // TODO Auto-generated constructor stub
-    }
+  public SysomosException(String arg0) {
+    super(arg0);
+    // TODO Auto-generated constructor stub
+  }
 
-    public SysomosException(Throwable arg0) {
-        super(arg0);
-        // TODO Auto-generated constructor stub
-    }
+  public SysomosException(Throwable arg0) {
+    super(arg0);
+    // TODO Auto-generated constructor stub
+  }
 
-    public SysomosException(String arg0, Throwable arg1) {
-        super(arg0, arg1);
-        // TODO Auto-generated constructor stub
-    }
+  public SysomosException(String arg0, Throwable arg1) {
+    super(arg0, arg1);
+    // TODO Auto-generated constructor stub
+  }
 
-    public SysomosException(String arg0, int errorCode) {
-        super(arg0);
-        this.errorCode = errorCode;
-    }
+  public SysomosException(String arg0, int errorCode) {
+    super(arg0);
+    this.errorCode = errorCode;
+  }
 
-    public SysomosException(String arg0, Throwable arg1, int errorCode) {
-        super(arg0, arg1);
-        this.errorCode = errorCode;
-    }
+  public SysomosException(String arg0, Throwable arg1, int errorCode) {
+    super(arg0, arg1);
+    this.errorCode = errorCode;
+  }
 
-    public int getErrorCode() {
-        return this.errorCode;
-    }
+  public int getErrorCode() {
+    return this.errorCode;
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/conversion/SysomosBeatActivityConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/conversion/SysomosBeatActivityConverter.java b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/conversion/SysomosBeatActivityConverter.java
index 5d2a399..c0278cd 100644
--- a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/conversion/SysomosBeatActivityConverter.java
+++ b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/conversion/SysomosBeatActivityConverter.java
@@ -19,12 +19,13 @@
 
 package org.apache.streams.sysomos.conversion;
 
-import com.sysomos.xml.BeatApi;
-import org.apache.commons.lang.StringUtils;
 import org.apache.streams.pojo.extensions.ExtensionUtil;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Provider;
+
+import com.sysomos.xml.BeatApi;
+import org.apache.commons.lang.StringUtils;
 import org.joda.time.DateTime;
 
 import java.util.HashMap;
@@ -42,103 +43,109 @@ import static org.apache.streams.data.util.ActivityUtil.getProviderId;
  */
 public class SysomosBeatActivityConverter {
 
-    private static final String LANGUAGE_KEY = "LANGUAGE";
-
-    public Activity convert(BeatApi.BeatResponse.Beat beat) {
-        Activity converted = new Activity();
-        converted.setId(beat.getDocid());
-        converted.setVerb("post");
-        converted.setContent(beat.getContent());
-        converted.setTitle(beat.getTitle());
-        converted.setPublished(new DateTime(beat.getTime()));
-        converted.setUrl(beat.getLink());
-        converted.setActor(new ActivityObject());
-        Map<String, BeatApi.BeatResponse.Beat.Tag> mappedTags = mapTags(beat);
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(converted);
-        extensions.put("keywords", beat.getContent());
-        setLocation(beat, extensions);
-        setObject(beat, converted);
-        setProvider(beat, converted);
-        setLanguage(mappedTags, extensions);
-        extensions.put("sysomos", beat);
-
-        setChannelSpecificValues(beat, converted, mappedTags);
-
-        return converted;
-    }
-
-    protected void setChannelSpecificValues(BeatApi.BeatResponse.Beat beat, Activity converted, Map<String, BeatApi.BeatResponse.Beat.Tag> mappedTags) {
-        String mediaType = beat.getMediaType();
-        String lowerMediaType = mediaType.toLowerCase();
-        ActivityObject actor = converted.getActor();
-        ActivityObject object = converted.getObject();
-        if ("TWITTER".equals(mediaType)) {
-            actor.setId(getPersonId(lowerMediaType, beat.getHost()));
-            actor.setDisplayName(beat.getHost());
-            actor.setUrl("http://twitter.com/" + beat.getHost());
-            object.setObjectType("tweet");
-            object.setId(getObjectId(lowerMediaType, "tweet", beat.getTweetid()));
-        } else if ("FACEBOOK".equals(mediaType)) {
-            String fbid = mappedTags.containsKey("FBID") ? mappedTags.get("FBID").getValue() : "";
-            actor.setId(getPersonId(lowerMediaType, fbid));
-            actor.setDisplayName(beat.getTitle());
-            actor.setUrl(beat.getHost());
-            object.setObjectType("post");
-            object.setId(getObjectId(lowerMediaType, "post", String.valueOf(converted.getContent().hashCode())));
-        } else {
-            actor.setId(null);
-            actor.setDisplayName(null);
-            actor.setUrl(null);
-            object.setObjectType("post");
-            object.setId(getObjectId(lowerMediaType, "post", String.valueOf(converted.getContent().hashCode())));
-        }
+  private static final String LANGUAGE_KEY = "LANGUAGE";
+
+  /**
+   * Converts a BeatApi.BeatResponse.Beat into an Activity.
+   * @param beat BeatApi.BeatResponse.Beat
+   * @return Activity
+   */
+  public Activity convert(BeatApi.BeatResponse.Beat beat) {
+    Activity converted = new Activity();
+    converted.setId(beat.getDocid());
+    converted.setVerb("post");
+    converted.setContent(beat.getContent());
+    converted.setTitle(beat.getTitle());
+    converted.setPublished(new DateTime(beat.getTime()));
+    converted.setUrl(beat.getLink());
+    converted.setActor(new ActivityObject());
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(converted);
+    extensions.put("keywords", beat.getContent());
+    setLocation(beat, extensions);
+    setObject(beat, converted);
+    setProvider(beat, converted);
+    Map<String, BeatApi.BeatResponse.Beat.Tag> mappedTags = mapTags(beat);
+    setLanguage(mappedTags, extensions);
+    extensions.put("sysomos", beat);
+
+    setChannelSpecificValues(beat, converted, mappedTags);
+
+    return converted;
+  }
+
+  protected void setChannelSpecificValues(
+      BeatApi.BeatResponse.Beat beat,
+      Activity converted, Map<String, BeatApi.BeatResponse.Beat.Tag> mappedTags) {
+    String mediaType = beat.getMediaType();
+    String lowerMediaType = mediaType.toLowerCase();
+    ActivityObject actor = converted.getActor();
+    ActivityObject object = converted.getObject();
+    if ("TWITTER".equals(mediaType)) {
+      actor.setId(getPersonId(lowerMediaType, beat.getHost()));
+      actor.setDisplayName(beat.getHost());
+      actor.setUrl("http://twitter.com/" + beat.getHost());
+      object.setObjectType("tweet");
+      object.setId(getObjectId(lowerMediaType, "tweet", beat.getTweetid()));
+    } else if ("FACEBOOK".equals(mediaType)) {
+      String fbid = mappedTags.containsKey("FBID") ? mappedTags.get("FBID").getValue() : "";
+      actor.setId(getPersonId(lowerMediaType, fbid));
+      actor.setDisplayName(beat.getTitle());
+      actor.setUrl(beat.getHost());
+      object.setObjectType("post");
+      object.setId(getObjectId(lowerMediaType, "post", String.valueOf(converted.getContent().hashCode())));
+    } else {
+      actor.setId(null);
+      actor.setDisplayName(null);
+      actor.setUrl(null);
+      object.setObjectType("post");
+      object.setId(getObjectId(lowerMediaType, "post", String.valueOf(converted.getContent().hashCode())));
     }
+  }
 
-    protected void setLanguage(Map<String, BeatApi.BeatResponse.Beat.Tag> mappedTags, Map<String, Object> extensions) {
-        if(mappedTags.containsKey(LANGUAGE_KEY)) {
-            extensions.put(LANGUAGE_EXTENSION, mappedTags.get(LANGUAGE_KEY).getValue());
-        }
+  protected void setLanguage(Map<String, BeatApi.BeatResponse.Beat.Tag> mappedTags, Map<String, Object> extensions) {
+    if (mappedTags.containsKey(LANGUAGE_KEY)) {
+      extensions.put(LANGUAGE_EXTENSION, mappedTags.get(LANGUAGE_KEY).getValue());
     }
-
-    protected void setObject(BeatApi.BeatResponse.Beat beat, Activity converted) {
-        ActivityObject object = new ActivityObject();
-        converted.setObject(object);
-        object.setUrl(beat.getLink());
-        object.setContent(beat.getContent());
-    }
-
-    @SuppressWarnings("unchecked")
-    protected void setLocation(BeatApi.BeatResponse.Beat beat, Map<String, Object> extensions) {
-        Map<String, Object> location;
-        String country = beat.getLocation().getCountry();
-        if(StringUtils.isNotBlank(country)) {
-            if (extensions.containsKey(LOCATION_EXTENSION)) {
-                location = (Map<String, Object>) extensions.get(LOCATION_EXTENSION);
-            } else {
-                location = new HashMap<>();
-                extensions.put(LOCATION_EXTENSION, location);
-            }
-            location.put(LOCATION_EXTENSION_COUNTRY, country);
-        }
+  }
+
+  protected void setObject(BeatApi.BeatResponse.Beat beat, Activity converted) {
+    ActivityObject object = new ActivityObject();
+    converted.setObject(object);
+    object.setUrl(beat.getLink());
+    object.setContent(beat.getContent());
+  }
+
+  @SuppressWarnings("unchecked")
+  protected void setLocation(BeatApi.BeatResponse.Beat beat, Map<String, Object> extensions) {
+    Map<String, Object> location;
+    String country = beat.getLocation().getCountry();
+    if (StringUtils.isNotBlank(country)) {
+      if (extensions.containsKey(LOCATION_EXTENSION)) {
+        location = (Map<String, Object>) extensions.get(LOCATION_EXTENSION);
+      } else {
+        location = new HashMap<>();
+        extensions.put(LOCATION_EXTENSION, location);
+      }
+      location.put(LOCATION_EXTENSION_COUNTRY, country);
     }
-
-    protected void setProvider(BeatApi.BeatResponse.Beat beat, Activity converted) {
-        Provider provider = new Provider();
-        String mediaType = beat.getMediaType().toLowerCase();
-        provider.setId(getProviderId(mediaType));
-        provider.setDisplayName(StringUtils.capitalize(mediaType));
-        converted.setProvider(provider);
+  }
+
+  protected void setProvider(BeatApi.BeatResponse.Beat beat, Activity converted) {
+    Provider provider = new Provider();
+    String mediaType = beat.getMediaType().toLowerCase();
+    provider.setId(getProviderId(mediaType));
+    provider.setDisplayName(StringUtils.capitalize(mediaType));
+    converted.setProvider(provider);
+  }
+
+  protected Map<String, BeatApi.BeatResponse.Beat.Tag> mapTags(BeatApi.BeatResponse.Beat beat) {
+    Map<String, BeatApi.BeatResponse.Beat.Tag> tags = new HashMap<>();
+    for (BeatApi.BeatResponse.Beat.Tag tag : beat.getTag()) {
+      if (tag.getSystemType() != null) {
+        tags.put(tag.getSystemType().trim(), tag);
+      }
     }
-
-    protected Map<String, BeatApi.BeatResponse.Beat.Tag> mapTags(BeatApi.BeatResponse.Beat beat) {
-        Map<String, BeatApi.BeatResponse.Beat.Tag> tags = new HashMap<>();
-        for(BeatApi.BeatResponse.Beat.Tag tag : beat.getTag()) {
-            if(tag.getSystemType() != null) {
-                tags.put(tag.getSystemType().trim(), tag);
-            }
-        }
-        return tags;
-    }
-
+    return tags;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/data/HeartbeatInfo.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/data/HeartbeatInfo.java b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/data/HeartbeatInfo.java
index 5915140..eede9f4 100644
--- a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/data/HeartbeatInfo.java
+++ b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/data/HeartbeatInfo.java
@@ -16,122 +16,116 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.sysomos.data;
 
-import org.apache.streams.sysomos.data.SysomosTagDefinition;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.xml.sax.InputSource;
 
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
 import java.io.ByteArrayInputStream;
 import java.util.ArrayList;
 import java.util.List;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
 
 /**
- * Represents Heatbeat metadata from the Sysomos API
+ * Represents Heartbeat metadata from the Sysomos API.
  */
 public class HeartbeatInfo {
 
-    private Document doc;
-    private List<SysomosTagDefinition> tags;
-
-    public HeartbeatInfo(String xmlString) throws Exception {
-        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
-        DocumentBuilder docBuilder = dbFactory.newDocumentBuilder();
-        this.doc = docBuilder.parse(new InputSource(new ByteArrayInputStream(xmlString.getBytes("utf-8"))));
-        this.tags = new ArrayList<SysomosTagDefinition>();
-        createTagDefinitions();
-    }
-
-
-    private void createTagDefinitions() {
-        this.tags = new ArrayList<SysomosTagDefinition>();
-        NodeList tagList = this.doc.getElementsByTagName("tag");
-
-        for(int i=0; i < tagList.getLength(); ++i) {
-            Node tag =  tagList.item(i);
-            SysomosTagDefinition tagDefinition = createSysomosTagDefinitionFromNode(tag);
-            if(this.hasTagName(tagDefinition.getTagName())) {
-                SysomosTagDefinition otherTag = this.getTagWithTagName(tagDefinition.getTagName());
-                if(!otherTag.getDisplayName().equals(tagDefinition.getDisplayName())) {
-                    throw new RuntimeException("A single tag ("+otherTag.getTagName()+") has multiple display names ("+otherTag.getDisplayName()+" , "+tagDefinition.getDisplayName()+")");
-                }
-                else {
-                    List<String> otherQueries = otherTag.getQueries();
-                    for(String query : tagDefinition.getQueries()) {
-                        if(!otherQueries.contains(query)) {
-                            otherTag.addQuery(query);
-                        }
-                    }
-                }
-            }
-            else {
-                this.tags.add(tagDefinition);
-            }
-
-        }
-    }
-
-    private SysomosTagDefinition createSysomosTagDefinitionFromNode(Node tag) {
-        Element tagElement = (Element) tag;
-        SysomosTagDefinition tagDefinition = new SysomosTagDefinition(tagElement.getElementsByTagName("name").item(0).getTextContent(),
-                tagElement.getElementsByTagName("displayName").item(0).getTextContent());
-        NodeList taggingRule = tagElement.getElementsByTagName("taggingRule");
-        for(int i=0; i < taggingRule.getLength(); ++i) {
-            Element rule = (Element) taggingRule.item(i);
-            NodeList queries = rule.getElementsByTagName("query");
-            for(int j=0; j < queries.getLength(); ++j) {
-                Element query = (Element) queries.item(j);
-                tagDefinition.addQuery(query.getTextContent());
+  private Document doc;
+  private List<SysomosTagDefinition> tags;
+
+  /**
+   * HeartbeatInfo constructor.
+   * @param xmlString xmlString
+   * @throws Exception Exception
+   */
+  public HeartbeatInfo(String xmlString) throws Exception {
+    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
+    DocumentBuilder docBuilder = dbFactory.newDocumentBuilder();
+    this.doc = docBuilder.parse(new InputSource(new ByteArrayInputStream(xmlString.getBytes("utf-8"))));
+    this.tags = new ArrayList<SysomosTagDefinition>();
+    createTagDefinitions();
+  }
+
+  private void createTagDefinitions() {
+    this.tags = new ArrayList<SysomosTagDefinition>();
+    NodeList tagList = this.doc.getElementsByTagName("tag");
+
+    for (int i = 0; i < tagList.getLength(); ++i) {
+      Node tag =  tagList.item(i);
+      SysomosTagDefinition tagDefinition = createSysomosTagDefinitionFromNode(tag);
+      if (this.hasTagName(tagDefinition.getTagName())) {
+        SysomosTagDefinition otherTag = this.getTagWithTagName(tagDefinition.getTagName());
+        if (!otherTag.getDisplayName().equals(tagDefinition.getDisplayName())) {
+          throw new RuntimeException("A single tag ("
+              + otherTag.getTagName()
+              + ") has multiple display names ("
+              + otherTag.getDisplayName()
+              + " , "
+              + tagDefinition.getDisplayName()
+              + ")");
+        } else {
+          List<String> otherQueries = otherTag.getQueries();
+          for (String query : tagDefinition.getQueries()) {
+            if (!otherQueries.contains(query)) {
+              otherTag.addQuery(query);
             }
+          }
         }
-        return tagDefinition;
-    }
+      } else {
+        this.tags.add(tagDefinition);
+      }
 
-    public boolean hasTagName(String tagName) {
-        for(SysomosTagDefinition tag : this.tags) {
-            if(tag.hasTagName(tagName)) {
-                return true;
-            }
-        }
-        return false;
     }
-
-    public SysomosTagDefinition getTagWithTagName(String tagName) {
-        for(SysomosTagDefinition tag : this.tags) {
-            if(tag.hasTagName(tagName)) {
-                return tag;
-            }
-        }
-        return null;
+  }
+
+  private SysomosTagDefinition createSysomosTagDefinitionFromNode(Node tag) {
+    Element tagElement = (Element) tag;
+    SysomosTagDefinition tagDefinition = new SysomosTagDefinition(tagElement.getElementsByTagName("name").item(0).getTextContent(),
+        tagElement.getElementsByTagName("displayName").item(0).getTextContent());
+    NodeList taggingRule = tagElement.getElementsByTagName("taggingRule");
+    for (int i = 0; i < taggingRule.getLength(); ++i) {
+      Element rule = (Element) taggingRule.item(i);
+      NodeList queries = rule.getElementsByTagName("query");
+      for (int j = 0; j < queries.getLength(); ++j) {
+        Element query = (Element) queries.item(j);
+        tagDefinition.addQuery(query.getTextContent());
+      }
     }
-
-    public boolean hasTagWithDisplayName(String displayName) {
-        for(SysomosTagDefinition tag : this.tags) {
-            if(tag.hasDisplayName(displayName)) {
-                return true;
-            }
-        }
-        return false;
+    return tagDefinition;
+  }
+
+  /**
+   * Returns whether a tag definition with the given tag name exists.
+   * @param tagName tagName
+   * @return hasTagName
+   */
+  public boolean hasTagName(String tagName) {
+    for (SysomosTagDefinition tag : this.tags) {
+      if (tag.hasTagName(tagName)) {
+        return true;
+      }
     }
-
-    public SysomosTagDefinition getTagWithDisplayName(String displayName) {
-        for(SysomosTagDefinition tag : this.tags) {
-            if(tag.hasDisplayName(displayName)) {
-                return tag;
-            }
-        }
-        return null;
-    }
-
-    public List<SysomosTagDefinition> getTagDefinitions() {
-        List<SysomosTagDefinition> result = new ArrayList<SysomosTagDefinition>();
-        result.addAll(this.tags);
-        return result;
+    return false;
+  }
+
+  /**
+   * Returns the tag definition matching the given tag name, or null if none matches.
+   * @param tagName tagName
+   * @return SysomosTagDefinition
+   */
+  public SysomosTagDefinition getTagWithTagName(String tagName) {
+    for (SysomosTagDefinition tag : this.tags) {
+      if (tag.hasTagName(tagName)) {
+        return tag;
+      }
     }
+    return null;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/data/SysomosTagDefinition.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/data/SysomosTagDefinition.java b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/data/SysomosTagDefinition.java
index a7a8cd4..889db31 100644
--- a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/data/SysomosTagDefinition.java
+++ b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/data/SysomosTagDefinition.java
@@ -16,6 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.sysomos.data;
 
 import java.util.ArrayList;
@@ -26,66 +27,75 @@ import java.util.List;
  */
 public class SysomosTagDefinition {
 
-    private String tagName;
-    private String displayName;
-    private List<String> queries;
+  private String tagName;
+  private String displayName;
+  private List<String> queries;
 
-    public SysomosTagDefinition(String tagName, String displayName) {
-        this.tagName = tagName;
-        this.displayName = displayName;
-        this.queries = new ArrayList<String>();
-    }
+  /**
+   * SysomosTagDefinition constructor.
+   * @param tagName tagName
+   * @param displayName displayName
+   */
+  public SysomosTagDefinition(String tagName, String displayName) {
+    this.tagName = tagName;
+    this.displayName = displayName;
+    this.queries = new ArrayList<String>();
+  }
 
-    public String getTagName() {
-        return this.tagName;
-    }
+  public String getTagName() {
+    return this.tagName;
+  }
 
-    public String getDisplayName() {
-        return this.displayName;
-    }
+  public String getDisplayName() {
+    return this.displayName;
+  }
 
-    public List<String> getQueries() {
-        List<String> result = new ArrayList<String>();
-        result.addAll(this.queries);
-        return result;
-    }
+  /**
+   * getQueries.
+   * @return Queries
+   */
+  public List<String> getQueries() {
+    List<String> result = new ArrayList<String>();
+    result.addAll(this.queries);
+    return result;
+  }
 
-    public void addQuery(String query) {
-        this.queries.add(query);
-    }
+  public void addQuery(String query) {
+    this.queries.add(query);
+  }
 
-    public boolean hasTagName(String tagName) {
-        return this.tagName.equals(tagName);
-    }
+  public boolean hasTagName(String tagName) {
+    return this.tagName.equals(tagName);
+  }
 
-    public boolean hasQuery(String query) {
-        return this.queries.contains(query);
-    }
+  public boolean hasQuery(String query) {
+    return this.queries.contains(query);
+  }
 
-    public boolean hasDisplayName(String displayName) {
-        return this.displayName.equals(displayName);
-    }
+  public boolean hasDisplayName(String displayName) {
+    return this.displayName.equals(displayName);
+  }
 
-    @Override
-    public boolean equals(Object o) {
-        if(!(o instanceof SysomosTagDefinition)) {
-            return false;
-        }
-        SysomosTagDefinition that = (SysomosTagDefinition) o;
-        if(!this.tagName.equals(that.tagName)) {
-            return false;
-        }
-        if(!this.displayName.equals(that.displayName)) {
-            return false;
-        }
-        if(this.queries.size() != that.queries.size()) {
-            return false;
-        }
-        for(int i=0; i < this.queries.size(); ++i) {
-            if(!that.queries.contains(this.queries.get(i))) {
-                return false;
-            }
-        }
-        return true;
+  @Override
+  public boolean equals(Object object) {
+    if (!(object instanceof SysomosTagDefinition)) {
+      return false;
+    }
+    SysomosTagDefinition that = (SysomosTagDefinition) object;
+    if (!this.tagName.equals(that.tagName)) {
+      return false;
+    }
+    if (!this.displayName.equals(that.displayName)) {
+      return false;
+    }
+    if (this.queries.size() != that.queries.size()) {
+      return false;
+    }
+    for (int i = 0; i < this.queries.size(); ++i) {
+      if (!that.queries.contains(this.queries.get(i))) {
+        return false;
+      }
     }
+    return true;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/processor/SysomosTypeConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/processor/SysomosTypeConverter.java b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/processor/SysomosTypeConverter.java
index 3ffd0b3..48615eb 100644
--- a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/processor/SysomosTypeConverter.java
+++ b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/processor/SysomosTypeConverter.java
@@ -19,45 +19,46 @@
 
 package org.apache.streams.sysomos.processor;
 
-import com.google.common.collect.Lists;
-import com.sysomos.xml.BeatApi;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.sysomos.conversion.SysomosBeatActivityConverter;
 
+import com.google.common.collect.Lists;
+import com.sysomos.xml.BeatApi;
+
 import java.util.List;
 
 /**
- * Stream processor that converts Sysomos type to Activity
+ * Stream processor that converts Sysomos type to Activity.
  */
 public class SysomosTypeConverter implements StreamsProcessor {
 
-    public final static String STREAMS_ID = "SysomosTypeConverter";
+  public static final String STREAMS_ID = "SysomosTypeConverter";
 
-    private SysomosBeatActivityConverter converter;
+  private SysomosBeatActivityConverter converter;
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        if(entry.getDocument() instanceof BeatApi.BeatResponse.Beat) {
-            entry.setDocument(converter.convert((BeatApi.BeatResponse.Beat)entry.getDocument()));
-            return Lists.newArrayList(entry);
-        } else {
-            return Lists.newArrayList();
-        }
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    if (entry.getDocument() instanceof BeatApi.BeatResponse.Beat) {
+      entry.setDocument(converter.convert((BeatApi.BeatResponse.Beat)entry.getDocument()));
+      return Lists.newArrayList(entry);
+    } else {
+      return Lists.newArrayList();
     }
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        converter = new SysomosBeatActivityConverter();
-    }
+  @Override
+  public void prepare(Object configurationObject) {
+    converter = new SysomosBeatActivityConverter();
+  }
 
-    @Override
-    public void cleanUp() {
-        //NOP
-    }
+  @Override
+  public void cleanUp() {
+    //NOP
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/AbstractRequestBuilder.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/AbstractRequestBuilder.java b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/AbstractRequestBuilder.java
index 28f33df..de604b4 100644
--- a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/AbstractRequestBuilder.java
+++ b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/AbstractRequestBuilder.java
@@ -19,40 +19,41 @@
 
 package org.apache.streams.sysomos.provider;
 
-import com.sysomos.xml.BeatApi;
-import com.sysomos.xml.ObjectFactory;
 import org.apache.streams.sysomos.SysomosException;
 import org.apache.streams.sysomos.util.SysomosUtils;
+
+import com.sysomos.xml.BeatApi;
+import com.sysomos.xml.ObjectFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.StringReader;
+import java.net.URL;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 import javax.xml.bind.Unmarshaller;
-import java.io.StringReader;
-import java.net.URL;
 
 /**
  * Defines a common pattern for requesting data from the Sysomos API.
  */
 public abstract class AbstractRequestBuilder implements RequestBuilder {
-    private final static Logger LOGGER = LoggerFactory.getLogger(AbstractRequestBuilder.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractRequestBuilder.class);
 
-    /**
-     * Executes the request to the Sysomos Heartbeat API and returns a valid response
-     */
-    public BeatApi.BeatResponse execute() {
-        URL url = this.getRequestUrl();
-        try {
-            String xmlResponse = SysomosUtils.queryUrl(url);
-            JAXBContext context = JAXBContext.newInstance(ObjectFactory.class);
-            Unmarshaller unmarshaller = context.createUnmarshaller();
-            BeatApi beatApi = (BeatApi) unmarshaller.unmarshal(new StringReader(xmlResponse));
-            return beatApi.getBeatResponse();
-        } catch (JAXBException e) {
-            LOGGER.error("Unable to unmarshal XML content");
-            throw new SysomosException("Unable to unmarshal XML content", e);
-        }
+  /**
+   * Executes the request to the Sysomos Heartbeat API and returns a valid response.
+   */
+  public BeatApi.BeatResponse execute() {
+    URL url = this.getRequestUrl();
+    try {
+      String xmlResponse = SysomosUtils.queryUrl(url);
+      JAXBContext context = JAXBContext.newInstance(ObjectFactory.class);
+      Unmarshaller unmarshaller = context.createUnmarshaller();
+      BeatApi beatApi = (BeatApi) unmarshaller.unmarshal(new StringReader(xmlResponse));
+      return beatApi.getBeatResponse();
+    } catch (JAXBException ex) {
+      LOGGER.error("Unable to unmarshal XML content");
+      throw new SysomosException("Unable to unmarshal XML content", ex);
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/ContentRequestBuilder.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/ContentRequestBuilder.java b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/ContentRequestBuilder.java
index 178014a..7ae47cf 100644
--- a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/ContentRequestBuilder.java
+++ b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/ContentRequestBuilder.java
@@ -20,6 +20,7 @@
 package org.apache.streams.sysomos.provider;
 
 import org.apache.streams.sysomos.SysomosException;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -27,7 +28,7 @@ import org.slf4j.LoggerFactory;
 import java.net.MalformedURLException;
 import java.net.URL;
 
-import static org.apache.streams.sysomos.util.SysomosUtils.*;
+import static org.apache.streams.sysomos.util.SysomosUtils.SYSOMOS_DATE_FORMATTER;
 
 /**
  * Builds requests for the Sysomos Heartbeat Content API.  This is the preferred method of
@@ -35,102 +36,102 @@ import static org.apache.streams.sysomos.util.SysomosUtils.*;
  */
 public class ContentRequestBuilder extends AbstractRequestBuilder implements RequestBuilder {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ContentRequestBuilder.class);
-
-    private String baseUrl;
-    private String hid;
-    private String addedAfter;
-    private String addedBefore;
-    private String size;
-    private String offset;
-    private String apiKey;
-
-    /**
-     * The max number of items you are allowed to get per request.
-     */
-    public static final int MAX_ALLOWED_PER_REQUEST = 10000;
-
-    /**
-     * Constructs a new ContentRequestBuilder for the specified API key and Sysomos URL
-     * @param baseUrl the base URL for the sysomos API
-     * @param apiKey the API key generated by Sysomos for authorization
-     */
-    protected ContentRequestBuilder(String baseUrl, String apiKey) {
-        this.baseUrl = baseUrl;
-        this.apiKey = apiKey;
-    }
-
-    /**
-     * Gets the Request URL based on the local fields
-     * @return a valid URL for the Sysomos API or an exception
-     */
-    @Override
-    public URL getRequestUrl()  {
-        StringBuilder url = new StringBuilder();
-        url.append(this.baseUrl);
-        url.append("v1/heartbeat/content?");
-        url.append("apiKey=");
-        url.append(this.apiKey);
-        url.append("&hid=");
-        url.append(this.hid);
-        if (size != null) {
-            url.append("&size=");
-            url.append(this.size);
-        }
-        if (this.addedAfter != null) {
-            url.append("&addedAfter=");
-            url.append(this.addedAfter);
-        }
-        if (this.addedBefore != null) {
-            url.append("&addedBefore=");
-            url.append(this.addedBefore);
-        }
-        if (this.offset != null) {
-            url.append("&offset=");
-            url.append(this.offset);
-        }
-        String urlString = url.toString();
-        LOGGER.debug("Constructed Request URL: {}", urlString);
-        try {
-            return new URL(urlString);
-        } catch (MalformedURLException e) {
-            throw new SysomosException("Malformed Request URL.  Check Request Builder parameters", e);
-        }
+  private static final Logger LOGGER = LoggerFactory.getLogger(ContentRequestBuilder.class);
+
+  private String baseUrl;
+  private String hid;
+  private String addedAfter;
+  private String addedBefore;
+  private String size;
+  private String offset;
+  private String apiKey;
+
+  /**
+   * The max number of items you are allowed to get per request.
+   */
+  public static final int MAX_ALLOWED_PER_REQUEST = 10000;
+
+  /**
+   * Constructs a new ContentRequestBuilder for the specified API key and Sysomos URL.
+   * @param baseUrl the base URL for the sysomos API
+   * @param apiKey the API key generated by Sysomos for authorization
+   */
+  protected ContentRequestBuilder(String baseUrl, String apiKey) {
+    this.baseUrl = baseUrl;
+    this.apiKey = apiKey;
+  }
+
+  /**
+   * Gets the Request URL based on the local fields.
+   * @return a valid URL for the Sysomos API or an exception
+   */
+  @Override
+  public URL getRequestUrl()  {
+    StringBuilder url = new StringBuilder();
+    url.append(this.baseUrl);
+    url.append("v1/heartbeat/content?");
+    url.append("apiKey=");
+    url.append(this.apiKey);
+    url.append("&hid=");
+    url.append(this.hid);
+    if (size != null) {
+      url.append("&size=");
+      url.append(this.size);
     }
-
-    @Override
-    public RequestBuilder setHeartBeatId(int hid) {
-        return setHeartBeatId(Integer.toString(hid));
+    if (this.addedAfter != null) {
+      url.append("&addedAfter=");
+      url.append(this.addedAfter);
     }
-
-    @Override
-    public RequestBuilder setHeartBeatId(String hid) {
-        this.hid = hid;
-        return this;
+    if (this.addedBefore != null) {
+      url.append("&addedBefore=");
+      url.append(this.addedBefore);
     }
-
-    @Override
-    public RequestBuilder setAddedAfterDate(DateTime afterDate) {
-        this.addedAfter = SYSOMOS_DATE_FORMATTER.print(afterDate);
-        return this;
+    if (this.offset != null) {
+      url.append("&offset=");
+      url.append(this.offset);
     }
-
-    @Override
-    public RequestBuilder setAddedBeforeDate(DateTime beforeDate) {
-        this.addedBefore = SYSOMOS_DATE_FORMATTER.print(beforeDate);
-        return this;
-    }
-
-    @Override
-    public RequestBuilder setReturnSetSize(long size) {
-        this.size = Long.toString(Math.min(size, MAX_ALLOWED_PER_REQUEST));
-        return this;
-    }
-
-    @Override
-    public RequestBuilder setOffset(long offset) {
-        this.offset = Long.toString(offset);
-        return this;
+    String urlString = url.toString();
+    LOGGER.debug("Constructed Request URL: {}", urlString);
+    try {
+      return new URL(urlString);
+    } catch (MalformedURLException ex) {
+      throw new SysomosException("Malformed Request URL.  Check Request Builder parameters", ex);
     }
+  }
+
+  @Override
+  public RequestBuilder setHeartBeatId(int hid) {
+    return setHeartBeatId(Integer.toString(hid));
+  }
+
+  @Override
+  public RequestBuilder setHeartBeatId(String hid) {
+    this.hid = hid;
+    return this;
+  }
+
+  @Override
+  public RequestBuilder setAddedAfterDate(DateTime afterDate) {
+    this.addedAfter = SYSOMOS_DATE_FORMATTER.print(afterDate);
+    return this;
+  }
+
+  @Override
+  public RequestBuilder setAddedBeforeDate(DateTime beforeDate) {
+    this.addedBefore = SYSOMOS_DATE_FORMATTER.print(beforeDate);
+    return this;
+  }
+
+  @Override
+  public RequestBuilder setReturnSetSize(long size) {
+    this.size = Long.toString(Math.min(size, MAX_ALLOWED_PER_REQUEST));
+    return this;
+  }
+
+  @Override
+  public RequestBuilder setOffset(long offset) {
+    this.offset = Long.toString(offset);
+    return this;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/RequestBuilder.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/RequestBuilder.java b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/RequestBuilder.java
index 0e12025..53887af 100644
--- a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/RequestBuilder.java
+++ b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/RequestBuilder.java
@@ -25,72 +25,73 @@ import org.joda.time.DateTime;
 import java.net.URL;
 
 /**
- * Simplifying abstraction that aids in building a request to the Sysomos API in a chained fashion
+ * Simplifying abstraction that aids in building a request to the Sysomos API in a chained fashion.
  */
 public interface RequestBuilder {
-    /**
-     * Sets the date after which documents should be returned from Sysomos
-     * @param afterDate the {@link org.joda.time.DateTime} instance representing the after date
-     *
-     * @return The RequestBuilder for continued Chaining
-     */
-    RequestBuilder setAddedAfterDate(DateTime afterDate);
 
-    /**
-     * Sets the date before which documents should be returned from Sysomos
-     * @param beforeDate the {@link org.joda.time.DateTime} instance representing the before date
-     *
-     * @return The RequestBuilder for continued Chaining
-     */
-    RequestBuilder setAddedBeforeDate(DateTime beforeDate);
+  /**
+   * Sets the date after which documents should be returned from Sysomos.
+   * @param afterDate the {@link org.joda.time.DateTime} instance representing the after date
+   *
+   * @return The RequestBuilder for continued Chaining
+   */
+  RequestBuilder setAddedAfterDate(DateTime afterDate);
 
-    /**
-     * Sets the size of the expected response
-     * @param size the number of documents
-     *
-     * @return The RequestBuilder for continued Chaining
-     */
-    RequestBuilder setReturnSetSize(long size);
+  /**
+   * Sets the date before which documents should be returned from Sysomos.
+   * @param beforeDate the {@link org.joda.time.DateTime} instance representing the before date
+   *
+   * @return The RequestBuilder for continued Chaining
+   */
+  RequestBuilder setAddedBeforeDate(DateTime beforeDate);
 
-    /**
-     * Sets the starting offset for the number of documents given the other parameters
-     * @param offset the starting offset
-     *
-     * @return The RequestBuilder for continued Chaining
-     */
-    RequestBuilder setOffset(long offset);
+  /**
+   * Sets the size of the expected response.
+   * @param size the number of documents
+   *
+   * @return The RequestBuilder for continued Chaining
+   */
+  RequestBuilder setReturnSetSize(long size);
 
-    /**
-     * Sets the Sysomos Heartbeat ID
-     * @param hid Heartbeat ID
-     *
-     * @return The RequestBuilder for continued Chaining
-     */
-    RequestBuilder setHeartBeatId(int hid);
+  /**
+   * Sets the starting offset for the number of documents given the other parameters.
+   * @param offset the starting offset
+   *
+   * @return The RequestBuilder for continued Chaining
+   */
+  RequestBuilder setOffset(long offset);
 
-    /**
-     *
-     * Sets the Sysomos Heartbeat ID as a String
-     * @param hid Heartbeat ID string
-     *
-     * @return The RequestBuilder for continued Chaining
-     */
-    RequestBuilder setHeartBeatId(String hid);
+  /**
+   * Sets the Sysomos Heartbeat ID.
+   * @param hid Heartbeat ID
+   *
+   * @return The RequestBuilder for continued Chaining
+   */
+  RequestBuilder setHeartBeatId(int hid);
 
-    /**
-     * Returns the full url need to execute a request.
-     *
-     * Example:
-     * http://api.sysomos.com/dev/v1/heartbeat/content?apiKey=YOUR
-     * -APIKEY&hid=YOUR-HEARTBEAT-ID&offset=0&size=10&
-     * addedAfter=2010-10-15T13:00:00Z&addedBefore=2010-10-18T13:00:00Z
-     *
-     * @return the URL to use when making requests of Sysomos Heartbeat
-     */
-    URL getRequestUrl();
+  /**
+   * Sets the Sysomos Heartbeat ID as a String.
+   * @param hid Heartbeat ID string
+   *
+   * @return The RequestBuilder for continued Chaining
+   */
+  RequestBuilder setHeartBeatId(String hid);
 
-    /**
-     * Executes the request to the Sysomos Heartbeat API and returns a valid response
-     */
-    BeatApi.BeatResponse execute();
+  /**
+   * Returns the full url need to execute a request.
+   *
+   * <p/>
+   * Example:
+   * http://api.sysomos.com/dev/v1/heartbeat/content?apiKey=YOUR
+   * -APIKEY&hid=YOUR-HEARTBEAT-ID&offset=0&size=10&
+   * addedAfter=2010-10-15T13:00:00Z&addedBefore=2010-10-18T13:00:00Z
+   *
+   * @return the URL to use when making requests of Sysomos Heartbeat
+   */
+  URL getRequestUrl();
+
+  /**
+   * Executes the request to the Sysomos Heartbeat API and returns a valid response.
+   */
+  BeatApi.BeatResponse execute();
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosClient.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosClient.java b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosClient.java
index 488b6c7..6b59d1e 100644
--- a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosClient.java
+++ b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosClient.java
@@ -30,26 +30,19 @@ import java.net.URL;
  */
 public class SysomosClient {
 
-    public static final String BASE_URL_STRING = "http://api.sysomos.com/";
-    private static final String HEARTBEAT_INFO_URL = "http://api.sysomos.com/v1/heartbeat/info?apiKey={api_key}&hid={hid}";
+  public static final String BASE_URL_STRING = "http://api.sysomos.com/";
+  private static final String HEARTBEAT_INFO_URL = "http://api.sysomos.com/v1/heartbeat/info?apiKey={api_key}&hid={hid}";
 
-    private String apiKey;
+  private String apiKey;
 
-    private HttpURLConnection client;
+  private HttpURLConnection client;
 
-    public SysomosClient(String apiKey) {
-        this.apiKey = apiKey;
-    }
+  public SysomosClient(String apiKey) {
+    this.apiKey = apiKey;
+  }
 
-    public HeartbeatInfo getHeartbeatInfo(String hid) throws Exception {
-        String urlString = HEARTBEAT_INFO_URL.replace("{api_key}", this.apiKey);
-        urlString = urlString.replace("{hid}", hid);
-        String xmlResponse = SysomosUtils.queryUrl(new URL(urlString));
-        return new HeartbeatInfo(xmlResponse);
-    }
-
-    public RequestBuilder createRequestBuilder() {
-        return new ContentRequestBuilder(BASE_URL_STRING, this.apiKey);
-    }
+  public RequestBuilder createRequestBuilder() {
+    return new ContentRequestBuilder(BASE_URL_STRING, this.apiKey);
+  }
 
 }


[15/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/main/java/org/apache/streams/jackson/ThroughputQueueDeserializer.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/main/java/org/apache/streams/jackson/ThroughputQueueDeserializer.java b/streams-monitoring/src/main/java/org/apache/streams/jackson/ThroughputQueueDeserializer.java
index e4d883d..35dbcd5 100644
--- a/streams-monitoring/src/main/java/org/apache/streams/jackson/ThroughputQueueDeserializer.java
+++ b/streams-monitoring/src/main/java/org/apache/streams/jackson/ThroughputQueueDeserializer.java
@@ -15,73 +15,78 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.jackson;
 
+import org.apache.streams.pojo.json.ThroughputQueueBroadcast;
+
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.DeserializationContext;
 import com.fasterxml.jackson.databind.JsonDeserializer;
 import com.fasterxml.jackson.databind.JsonNode;
-import org.apache.streams.pojo.json.ThroughputQueueBroadcast;
 import org.slf4j.Logger;
 
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.util.Arrays;
 import javax.management.MBeanAttributeInfo;
 import javax.management.MBeanInfo;
 import javax.management.MBeanServer;
 import javax.management.ObjectName;
-import java.io.IOException;
-import java.lang.management.ManagementFactory;
-import java.util.Arrays;
 
 public class ThroughputQueueDeserializer extends JsonDeserializer<ThroughputQueueBroadcast> {
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(ThroughputQueueDeserializer.class);
 
-    public ThroughputQueueDeserializer() {
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(ThroughputQueueDeserializer.class);
 
-    }
+  public ThroughputQueueDeserializer() {
 
-    @Override
-    public ThroughputQueueBroadcast deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
-        try {
-            MBeanServer server = ManagementFactory.getPlatformMBeanServer();
+  }
 
-            ThroughputQueueBroadcast throughputQueueBroadcast = new ThroughputQueueBroadcast();
-            JsonNode attributes = jsonParser.getCodec().readTree(jsonParser);
+  @Override
+  public ThroughputQueueBroadcast deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
+    try {
+      MBeanServer server = ManagementFactory.getPlatformMBeanServer();
 
-            ObjectName name = new ObjectName(attributes.get("canonicalName").asText());
-            MBeanInfo info = server.getMBeanInfo(name);
-            throughputQueueBroadcast.setName(name.toString());
+      ThroughputQueueBroadcast throughputQueueBroadcast = new ThroughputQueueBroadcast();
+      JsonNode attributes = jsonParser.getCodec().readTree(jsonParser);
 
-            for (MBeanAttributeInfo attribute : Arrays.asList(info.getAttributes())) {
-                try {
-                    switch(attribute.getName()) {
-                        case "CurrentSize":
-                            throughputQueueBroadcast.setCurrentSize((long) server.getAttribute(name, attribute.getName()));
-                            break;
-                        case "AvgWait":
-                            throughputQueueBroadcast.setAvgWait((double) server.getAttribute(name, attribute.getName()));
-                            break;
-                        case "MaxWait":
-                            throughputQueueBroadcast.setMaxWait((long) server.getAttribute(name, attribute.getName()));
-                            break;
-                        case "Removed":
-                            throughputQueueBroadcast.setRemoved((long) server.getAttribute(name, attribute.getName()));
-                            break;
-                        case "Added":
-                            throughputQueueBroadcast.setAdded((long) server.getAttribute(name, attribute.getName()));
-                            break;
-                        case "Throughput":
-                            throughputQueueBroadcast.setThroughput((double) server.getAttribute(name, attribute.getName()));
-                            break;
-                    }
-                } catch (Exception e) {
-                    LOGGER.error("Exception while trying to deserialize ThroughputQueueBroadcast object: {}", e);
-                }
-            }
+      ObjectName name = new ObjectName(attributes.get("canonicalName").asText());
+      MBeanInfo info = server.getMBeanInfo(name);
+      throughputQueueBroadcast.setName(name.toString());
 
-            return throughputQueueBroadcast;
-        } catch (Exception e) {
-            return null;
+      for (MBeanAttributeInfo attribute : Arrays.asList(info.getAttributes())) {
+        try {
+          switch (attribute.getName()) {
+            case "CurrentSize":
+              throughputQueueBroadcast.setCurrentSize((long) server.getAttribute(name, attribute.getName()));
+              break;
+            case "AvgWait":
+              throughputQueueBroadcast.setAvgWait((double) server.getAttribute(name, attribute.getName()));
+              break;
+            case "MaxWait":
+              throughputQueueBroadcast.setMaxWait((long) server.getAttribute(name, attribute.getName()));
+              break;
+            case "Removed":
+              throughputQueueBroadcast.setRemoved((long) server.getAttribute(name, attribute.getName()));
+              break;
+            case "Added":
+              throughputQueueBroadcast.setAdded((long) server.getAttribute(name, attribute.getName()));
+              break;
+            case "Throughput":
+              throughputQueueBroadcast.setThroughput((double) server.getAttribute(name, attribute.getName()));
+              break;
+            default:
+              break;
+          }
+        } catch (Exception ex) {
+          LOGGER.error("Exception while trying to deserialize ThroughputQueueBroadcast object: {}", ex);
         }
+      }
+
+      return throughputQueueBroadcast;
+    } catch (Exception ex) {
+      return null;
     }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/MessagePersister.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/MessagePersister.java b/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/MessagePersister.java
index 28c7fa7..667e9f6 100644
--- a/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/MessagePersister.java
+++ b/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/MessagePersister.java
@@ -15,20 +15,21 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.monitoring.persist;
 
 import java.util.List;
 
 /**
- * Interface to define how we persist messages (JMX/monitoring related)
+ * Interface to define how we persist messages (JMX/monitoring related).
  */
 public interface MessagePersister {
 
-    /**
-     * Given a list of messages, persist them out through whatever appropriate
-     * broadcast mechanism (HTTP request, SLF4J log, etc.)
-     * @param messages
-     * @return statusCode represents whether or not the persist was successful
-     */
-    int persistMessages(List<String> messages);
+  /**
+   * Given a list of messages, persist them out through whatever appropriate
+   * broadcast mechanism (HTTP request, SLF4J log, etc.).
+   * @param messages List of String messages
+   * @return statusCode represents whether or not the persist was successful
+   */
+  int persistMessages(List<String> messages);
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/BroadcastMessagePersister.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/BroadcastMessagePersister.java b/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/BroadcastMessagePersister.java
index bf0591f..1466f31 100644
--- a/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/BroadcastMessagePersister.java
+++ b/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/BroadcastMessagePersister.java
@@ -15,8 +15,11 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.monitoring.persist.impl;
 
+import org.apache.streams.monitoring.persist.MessagePersister;
+
 import com.google.common.collect.Lists;
 import org.apache.http.HttpResponse;
 import org.apache.http.NameValuePair;
@@ -25,70 +28,70 @@ import org.apache.http.client.entity.UrlEncodedFormEntity;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.http.message.BasicNameValuePair;
-import org.apache.streams.monitoring.persist.MessagePersister;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.List;
 
 public class BroadcastMessagePersister implements MessagePersister {
-    private final static Logger LOGGER = LoggerFactory.getLogger(BroadcastMessagePersister.class);
-    private String broadcastURI;
 
-    public BroadcastMessagePersister(String broadcastURI) {
-        this.broadcastURI = broadcastURI;
-    }
+  private static final Logger LOGGER = LoggerFactory.getLogger(BroadcastMessagePersister.class);
+  private String broadcastUri;
 
-    @Override
-    /**
-     * Given a list of messages as Strings, broadcast them to the broadcastURI
-     * (if one is defined)
+   * @param messages List of String messages
-     * @return int status code from POST response
-     */
-    public int persistMessages(List<String> messages) {
-        int responseCode = -1;
+  public BroadcastMessagePersister(String broadcastUri) {
+    this.broadcastUri = broadcastUri;
+  }
 
-        if(broadcastURI != null) {
-            try {
-                HttpClient client = HttpClients.createDefault();
-                HttpPost post = new HttpPost(broadcastURI);
+  @Override
+  /**
+   * Given a list of messages as Strings, broadcast them to the broadcastUri
+   * (if one is defined)
+   * @param messages List of String messages
+   * @return int status code from POST response
+   */
+  public int persistMessages(List<String> messages) {
+    int responseCode = -1;
 
-                post.setHeader("User-Agent", "Streams");
+    if (broadcastUri != null) {
+      try {
+        HttpClient client = HttpClients.createDefault();
+        HttpPost post = new HttpPost(broadcastUri);
 
-                List<NameValuePair> urlParameters = Lists.newArrayList();
-                urlParameters.add(new BasicNameValuePair("messages", serializeMessages(messages)));
+        post.setHeader("User-Agent", "Streams");
 
-                post.setEntity(new UrlEncodedFormEntity(urlParameters, "UTF-8"));
+        List<NameValuePair> urlParameters = Lists.newArrayList();
+        urlParameters.add(new BasicNameValuePair("messages", serializeMessages(messages)));
 
-                HttpResponse response = client.execute(post);
-                responseCode = response.getStatusLine().getStatusCode();
+        post.setEntity(new UrlEncodedFormEntity(urlParameters, "UTF-8"));
 
-                LOGGER.debug("Broadcast {} messages to URI: {}", messages.size(), broadcastURI);
-            } catch (Exception e) {
-                LOGGER.error("Failed to broadcast message to URI: {}, exception: {}", broadcastURI, e);
-            }
-        }
+        HttpResponse response = client.execute(post);
+        responseCode = response.getStatusLine().getStatusCode();
 
-        return responseCode;
+        LOGGER.debug("Broadcast {} messages to URI: {}", messages.size(), broadcastUri);
+      } catch (Exception ex) {
+        LOGGER.error("Failed to broadcast message to URI: {}, exception: {}", broadcastUri, ex);
+      }
     }
 
-    /**
-     * Given a List of String messages, convert them to a JSON array
-     * @param messages
-     * @return Serialized version of this JSON array
-     */
-    private String serializeMessages(List<String> messages) {
-        String ser = "{\"messages\":[";
-
-        for(String message : messages) {
-            if(messages.get(messages.size()-1).equals(message)) {
-                ser += message + "]}";
-            } else {
-                ser += message + ",";
-            }
-        }
-
-        return ser;
+    return responseCode;
+  }
+
+  /**
+   * Given a List of String messages, convert them to a JSON array.
+   * @param messages List of String messages
+   * @return Serialized version of this JSON array
+   */
+  private String serializeMessages(List<String> messages) {
+    String ser = "{\"messages\":[";
+
+    for (String message : messages) {
+      if (messages.get(messages.size() - 1).equals(message)) {
+        ser += message + "]}";
+      } else {
+        ser += message + ",";
+      }
     }
+
+    return ser;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/LogstashUdpMessagePersister.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/LogstashUdpMessagePersister.java b/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/LogstashUdpMessagePersister.java
index 312502c..c697661 100644
--- a/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/LogstashUdpMessagePersister.java
+++ b/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/LogstashUdpMessagePersister.java
@@ -15,90 +15,93 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.monitoring.persist.impl;
 
 import org.apache.streams.monitoring.persist.MessagePersister;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.net.DatagramPacket;
 import java.net.DatagramSocket;
-import java.net.InetAddress;
 import java.net.InetSocketAddress;
-import java.net.SocketAddress;
 import java.net.SocketException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.nio.ByteBuffer;
 import java.util.List;
-import java.util.concurrent.Executors;
 
 public class LogstashUdpMessagePersister implements MessagePersister {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(LogstashUdpMessagePersister.class);
-    private String broadcastURI;
-    URI uri;
+  private static final Logger LOGGER = LoggerFactory.getLogger(LogstashUdpMessagePersister.class);
+  private String broadcastUri;
+  URI uri;
+
+  public LogstashUdpMessagePersister(String broadcastUri) {
+    this.broadcastUri = broadcastUri;
+    setup();
+  }
 
-    public LogstashUdpMessagePersister(String broadcastURI) {
-        this.broadcastURI = broadcastURI;
-        setup();
+  /**
+   * setup.
+   */
+  public void setup() {
+
+    try {
+      uri = new URI(broadcastUri);
+    } catch (URISyntaxException ex) {
+      LOGGER.error(ex.getMessage());
     }
 
-    public void setup() {
+  }
 
-        try {
-            uri = new URI(broadcastURI);
-        } catch (URISyntaxException e) {
-            LOGGER.error(e.getMessage());
-        }
+  @Override
+  /**
+   * Given a list of messages as Strings, broadcast them to the broadcastUri
+   * (if one is defined)
+   * @param messages List of String messages
+   * @return int status code from POST response
+   */
+  public int persistMessages(List<String> messages) {
+    int responseCode = -1;
 
-    }
-    @Override
-    /**
-     * Given a list of messages as Strings, broadcast them to the broadcastURI
-     * (if one is defined)
-     * @param messages
-     * @return int status code from POST response
-     */
-    public int persistMessages(List<String> messages) {
-        int responseCode = -1;
-
-        if(broadcastURI != null) {
-            DatagramSocket socket = null;
-            try {
-                socket = new DatagramSocket();
-            } catch (SocketException e) {
-                LOGGER.error("Metrics Broadcast Setup Failed: " + e.getMessage());
-            }
-            try {
-                ByteBuffer toWrite = ByteBuffer.wrap(serializeMessages(messages).getBytes());
-                byte[] byteArray = toWrite.array();
-                DatagramPacket packet = new DatagramPacket(byteArray, byteArray.length);
-                socket.connect(new InetSocketAddress(uri.getHost(), uri.getPort()));
-                socket.send(packet);
-            } catch( Exception e ) {
-                LOGGER.error("Metrics Broadcast Failed: " + e.getMessage());
-            } finally {
-                socket.close();
-            }
-        }
-
-        return responseCode;
+    if (broadcastUri != null) {
+      DatagramSocket socket = null;
+      try {
+        socket = new DatagramSocket();
+      } catch (SocketException ex) {
+        LOGGER.error("Metrics Broadcast Setup Failed: " + ex.getMessage());
+      }
+      try {
+        ByteBuffer toWrite = ByteBuffer.wrap(serializeMessages(messages).getBytes());
+        byte[] byteArray = toWrite.array();
+        DatagramPacket packet = new DatagramPacket(byteArray, byteArray.length);
+        socket.connect(new InetSocketAddress(uri.getHost(), uri.getPort()));
+        socket.send(packet);
+      } catch ( Exception ex ) {
+        LOGGER.error("Metrics Broadcast Failed: " + ex.getMessage());
+      } finally {
+        socket.close();
+      }
     }
 
-    /**
-     * Given a List of String messages, convert them to a JSON array
-     * @param messages
-     * @return Serialized version of this JSON array
-     */
-    private String serializeMessages(List<String> messages) {
+    return responseCode;
+  }
 
-        StringBuilder json_lines = new StringBuilder();
-        for(String message : messages) {
-            json_lines.append(message).append('\n');
-        }
+  /**
+   * Given a List of String messages, convert them to a JSON array.
+   * @param messages List of String messages
+   * @return Serialized version of this JSON array
+   */
+  private String serializeMessages(List<String> messages) {
 
-        return json_lines.toString();
+    StringBuilder jsonLines = new StringBuilder();
+    for (String message : messages) {
+      jsonLines.append(message).append('\n');
     }
 
+    return jsonLines.toString();
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/SLF4JMessagePersister.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/SLF4JMessagePersister.java b/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/SLF4JMessagePersister.java
deleted file mode 100644
index 19c36f2..0000000
--- a/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/SLF4JMessagePersister.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.monitoring.persist.impl;
-
-import org.apache.streams.monitoring.persist.MessagePersister;
-import org.slf4j.Logger;
-
-import java.util.List;
-
-public class SLF4JMessagePersister implements MessagePersister {
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(SLF4JMessagePersister.class);
-    private static final int SUCCESS_STATUS = 0;
-    private static final int FAILURE_STATUS = -1;
-
-    public SLF4JMessagePersister() {
-
-    }
-
-    @Override
-    public int persistMessages(List<String> messages) {
-        for(String message : messages) {
-            LOGGER.info(message);
-        }
-
-        return SUCCESS_STATUS;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/Slf4jMessagePersister.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/Slf4jMessagePersister.java b/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/Slf4jMessagePersister.java
new file mode 100644
index 0000000..b237871
--- /dev/null
+++ b/streams-monitoring/src/main/java/org/apache/streams/monitoring/persist/impl/Slf4jMessagePersister.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.monitoring.persist.impl;
+
+import org.apache.streams.monitoring.persist.MessagePersister;
+
+import org.slf4j.Logger;
+
+import java.util.List;
+
+/**
+ * Persist monitoring messages to SLF4J.
+ */
+public class Slf4jMessagePersister implements MessagePersister {
+
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(Slf4jMessagePersister.class);
+  private static final int SUCCESS_STATUS = 0;
+  private static final int FAILURE_STATUS = -1;
+
+  public Slf4jMessagePersister() {
+
+  }
+
+  @Override
+  public int persistMessages(List<String> messages) {
+
+    for (String message : messages) {
+      LOGGER.info(message);
+    }
+
+    return SUCCESS_STATUS;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/main/java/org/apache/streams/monitoring/tasks/BroadcastMonitorThread.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/main/java/org/apache/streams/monitoring/tasks/BroadcastMonitorThread.java b/streams-monitoring/src/main/java/org/apache/streams/monitoring/tasks/BroadcastMonitorThread.java
index f21a212..a797ce5 100644
--- a/streams-monitoring/src/main/java/org/apache/streams/monitoring/tasks/BroadcastMonitorThread.java
+++ b/streams-monitoring/src/main/java/org/apache/streams/monitoring/tasks/BroadcastMonitorThread.java
@@ -15,189 +15,204 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.monitoring.tasks;
 
-import com.fasterxml.jackson.databind.DeserializationFeature;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.module.SimpleModule;
-import com.google.common.collect.Lists;
 import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.jackson.*;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.jackson.DatumStatusCounterDeserializer;
+import org.apache.streams.jackson.MemoryUsageDeserializer;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.jackson.StreamsTaskCounterDeserializer;
+import org.apache.streams.jackson.ThroughputQueueDeserializer;
 import org.apache.streams.local.monitoring.MonitoringConfiguration;
 import org.apache.streams.monitoring.persist.MessagePersister;
 import org.apache.streams.monitoring.persist.impl.BroadcastMessagePersister;
 import org.apache.streams.monitoring.persist.impl.LogstashUdpMessagePersister;
-import org.apache.streams.monitoring.persist.impl.SLF4JMessagePersister;
+import org.apache.streams.monitoring.persist.impl.Slf4jMessagePersister;
 import org.apache.streams.pojo.json.Broadcast;
 import org.apache.streams.pojo.json.DatumStatusCounterBroadcast;
 import org.apache.streams.pojo.json.MemoryUsageBroadcast;
 import org.apache.streams.pojo.json.StreamsTaskCounterBroadcast;
 import org.apache.streams.pojo.json.ThroughputQueueBroadcast;
+
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+import com.google.common.collect.Lists;
 import org.slf4j.Logger;
 
-import javax.management.*;
 import java.lang.management.ManagementFactory;
 import java.net.URI;
-import java.net.URISyntaxException;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import javax.management.MBeanServer;
+import javax.management.NotificationBroadcasterSupport;
+import javax.management.ObjectName;
 
 /**
  * This thread runs inside of a Streams runtime and periodically persists information
- * from relevant JMX beans
+ * from relevant JMX beans.
  */
 public class BroadcastMonitorThread extends NotificationBroadcasterSupport implements Runnable {
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(BroadcastMonitorThread.class);
-    private static MBeanServer server;
-
-    private MonitoringConfiguration configuration;
-    private URI broadcastURI = null;
-    private MessagePersister messagePersister;
-    private volatile boolean keepRunning;
-
-    private static ObjectMapper objectMapper = StreamsJacksonMapper.getInstance();
-
-    /**
-     * DEPRECATED
-     * Please initialize logging with monitoring object via typesafe
-     * @param streamConfig
-     */
-    @Deprecated
-    public BroadcastMonitorThread(Map<String, Object> streamConfig) {
-        this(objectMapper.convertValue(streamConfig, MonitoringConfiguration.class));
-    }
-
-    public BroadcastMonitorThread(StreamsConfiguration streamConfig) {
-        this(objectMapper.convertValue(streamConfig.getAdditionalProperties().get("monitoring"), MonitoringConfiguration.class));
-    }
-
-    public BroadcastMonitorThread(MonitoringConfiguration configuration) {
-
-        this.configuration = configuration;
-        if( this.configuration == null )
-            this.configuration = new ComponentConfigurator<>(MonitoringConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig().atPath("monitoring"));
-
-        LOGGER.info("BroadcastMonitorThread created");
-
-        initializeObjectMapper();
-
-        prepare();
-
-        LOGGER.info("BroadcastMonitorThread initialized");
 
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(BroadcastMonitorThread.class);
+  private static MBeanServer server;
+
+  private MonitoringConfiguration configuration;
+  private URI broadcastUri = null;
+  private MessagePersister messagePersister;
+  private volatile boolean keepRunning;
+
+  private static ObjectMapper objectMapper = StreamsJacksonMapper.getInstance();
+
+  /**
+   * DEPRECATED
+   * Please initialize logging with monitoring object via typesafe.
+   * @param streamConfig streamConfig map.
+   */
+  @Deprecated
+  public BroadcastMonitorThread(Map<String, Object> streamConfig) {
+    this(objectMapper.convertValue(streamConfig, MonitoringConfiguration.class));
+  }
+
+  public BroadcastMonitorThread(StreamsConfiguration streamConfig) {
+    this(objectMapper.convertValue(streamConfig.getAdditionalProperties().get("monitoring"), MonitoringConfiguration.class));
+  }
+
+  /**
+   * BroadcastMonitorThread constructor - uses supplied MonitoringConfiguration.
+   * @param configuration MonitoringConfiguration
+   */
+  public BroadcastMonitorThread(MonitoringConfiguration configuration) {
+
+    this.configuration = configuration;
+    if ( this.configuration == null ) {
+      this.configuration = new ComponentConfigurator<>(MonitoringConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig().atPath("monitoring"));
     }
 
-    /**
-     * Initialize our object mapper with all of our bean's custom deserializers
-     * This way we can convert them to and from Strings dictated by our
-     * POJOs which are generated from JSON schemas
-     */
-    private void initializeObjectMapper() {
-        SimpleModule simpleModule = new SimpleModule();
-
-        simpleModule.addDeserializer(MemoryUsageBroadcast.class, new MemoryUsageDeserializer());
-        simpleModule.addDeserializer(ThroughputQueueBroadcast.class, new ThroughputQueueDeserializer());
-        simpleModule.addDeserializer(StreamsTaskCounterBroadcast.class, new StreamsTaskCounterDeserializer());
-        simpleModule.addDeserializer(DatumStatusCounterBroadcast.class, new DatumStatusCounterDeserializer());
-
-        objectMapper.registerModule(simpleModule);
-        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-    }
+    LOGGER.info("BroadcastMonitorThread created");
+
+    initializeObjectMapper();
+
+    prepare();
+
+    LOGGER.info("BroadcastMonitorThread initialized");
+
+  }
+
+  /**
+   * Initialize our object mapper with all of our bean's custom deserializers.
+   * This way we can convert them to and from Strings dictated by our
+   * POJOs which are generated from JSON schemas.
+   */
+  private void initializeObjectMapper() {
+    SimpleModule simpleModule = new SimpleModule();
+
+    simpleModule.addDeserializer(MemoryUsageBroadcast.class, new MemoryUsageDeserializer());
+    simpleModule.addDeserializer(ThroughputQueueBroadcast.class, new ThroughputQueueDeserializer());
+    simpleModule.addDeserializer(StreamsTaskCounterBroadcast.class, new StreamsTaskCounterDeserializer());
+    simpleModule.addDeserializer(DatumStatusCounterBroadcast.class, new DatumStatusCounterDeserializer());
+
+    objectMapper.registerModule(simpleModule);
+    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+  }
+
+  /**
+   * Get all relevant JMX beans, convert their values to strings, and then persist them.
+   */
+  @Override
+  public void run() {
+    LOGGER.info("BroadcastMonitorThread running");
+    while (keepRunning) {
+      try {
+        List<String> messages = Lists.newArrayList();
+        Set<ObjectName> beans = server.queryNames(null, null);
+
+        for (ObjectName name : beans) {
+          String item = objectMapper.writeValueAsString(name);
+          Broadcast broadcast = null;
+
+          if (name.getKeyPropertyList().get("type") != null) {
+            if (name.getKeyPropertyList().get("type").equals("ThroughputQueue")) {
+              broadcast = objectMapper.readValue(item, ThroughputQueueBroadcast.class);
+            } else if (name.getKeyPropertyList().get("type").equals("StreamsTaskCounter")) {
+              broadcast = objectMapper.readValue(item, StreamsTaskCounterBroadcast.class);
+            } else if (name.getKeyPropertyList().get("type").equals("DatumStatusCounter")) {
+              broadcast = objectMapper.readValue(item, DatumStatusCounterBroadcast.class);
+            } else if (name.getKeyPropertyList().get("type").equals("Memory")) {
+              broadcast = objectMapper.readValue(item, MemoryUsageBroadcast.class);
+            }
 
-    /**
-     * Get all relevant JMX beans, convert their values to strings, and then persist them
-     */
-    @Override
-    public void run() {
-        LOGGER.info("BroadcastMonitorThread running");
-        while(keepRunning) {
-            try {
-                List<String> messages = Lists.newArrayList();
-                Set<ObjectName> beans = server.queryNames(null, null);
-
-                for(ObjectName name : beans) {
-                    String item = objectMapper.writeValueAsString(name);
-                    Broadcast broadcast = null;
-
-                    if(name.getKeyPropertyList().get("type") != null) {
-                        if (name.getKeyPropertyList().get("type").equals("ThroughputQueue")) {
-                            broadcast = objectMapper.readValue(item, ThroughputQueueBroadcast.class);
-                        } else if (name.getKeyPropertyList().get("type").equals("StreamsTaskCounter")) {
-                            broadcast = objectMapper.readValue(item, StreamsTaskCounterBroadcast.class);
-                        } else if (name.getKeyPropertyList().get("type").equals("DatumStatusCounter")) {
-                            broadcast = objectMapper.readValue(item, DatumStatusCounterBroadcast.class);
-                        } else if (name.getKeyPropertyList().get("type").equals("Memory")) {
-                            broadcast = objectMapper.readValue(item, MemoryUsageBroadcast.class);
-                        }
-
-                        if(broadcast != null) {
-                            messages.add(objectMapper.writeValueAsString(broadcast));
-                        }
-                    }
-                }
-
-                messagePersister.persistMessages(messages);
-                Thread.sleep(configuration.getMonitoringBroadcastIntervalMs());
-            } catch (InterruptedException e) {
-                LOGGER.debug("Broadcast Monitor Interrupted!");
-                Thread.currentThread().interrupt();
-                this.keepRunning = false;
-            } catch (Exception e) {
-                LOGGER.error("Exception: {}", e);
-                this.keepRunning = false;
+            if (broadcast != null) {
+              messages.add(objectMapper.writeValueAsString(broadcast));
             }
+          }
         }
+
+        messagePersister.persistMessages(messages);
+        Thread.sleep(configuration.getMonitoringBroadcastIntervalMs());
+      } catch (InterruptedException ex) {
+        LOGGER.debug("Broadcast Monitor Interrupted!");
+        Thread.currentThread().interrupt();
+        this.keepRunning = false;
+      } catch (Exception ex) {
+        LOGGER.error("Exception: {}", ex);
+        this.keepRunning = false;
+      }
     }
+  }
 
-    public void prepare() {
+  /**
+   * prepare for execution.
+   */
+  public void prepare() {
 
-        keepRunning = true;
+    keepRunning = true;
 
-        LOGGER.info("BroadcastMonitorThread setup " + this.configuration);
+    LOGGER.info("BroadcastMonitorThread setup " + this.configuration);
 
-        server = ManagementFactory.getPlatformMBeanServer();
+    server = ManagementFactory.getPlatformMBeanServer();
 
-        if (this.configuration != null &&
-            this.configuration.getBroadcastURI() != null) {
+    if (this.configuration != null && this.configuration.getBroadcastURI() != null) {
 
-            try {
-                broadcastURI = new URI(configuration.getBroadcastURI());
-            } catch (Exception e) {
-                LOGGER.error("invalid URI: ", e);
-            }
+      try {
+        broadcastUri = new URI(configuration.getBroadcastURI());
+      } catch (Exception ex) {
+        LOGGER.error("invalid URI: ", ex);
+      }
 
-            if (broadcastURI != null) {
-                if (broadcastURI.getScheme().equals("http")) {
-                    messagePersister = new BroadcastMessagePersister(broadcastURI.toString());
-                } else if (broadcastURI.getScheme().equals("udp")) {
-                    messagePersister = new LogstashUdpMessagePersister(broadcastURI.toString());
-                } else {
-                    LOGGER.error("You need to specify a broadcast URI with either a HTTP or UDP protocol defined.");
-                    throw new RuntimeException();
-                }
-            } else {
-                messagePersister = new SLF4JMessagePersister();
-            }
+      if (broadcastUri != null) {
+        if (broadcastUri.getScheme().equals("http")) {
+          messagePersister = new BroadcastMessagePersister(broadcastUri.toString());
+        } else if (broadcastUri.getScheme().equals("udp")) {
+          messagePersister = new LogstashUdpMessagePersister(broadcastUri.toString());
         } else {
-            messagePersister = new SLF4JMessagePersister();
+          LOGGER.error("You need to specify a broadcast URI with either a HTTP or UDP protocol defined.");
+          throw new RuntimeException();
         }
-
+      } else {
+        messagePersister = new Slf4jMessagePersister();
+      }
+    } else {
+      messagePersister = new Slf4jMessagePersister();
     }
 
-    public void shutdown() {
-        this.keepRunning = false;
-        LOGGER.debug("Shutting down BroadcastMonitor Thread");
-    }
+  }
 
-    public String getBroadcastURI() {
-        return configuration.getBroadcastURI();
-    }
+  public void shutdown() {
+    this.keepRunning = false;
+    LOGGER.debug("Shutting down BroadcastMonitor Thread");
+  }
 
-    public long getWaitTime() {
-        return configuration.getMonitoringBroadcastIntervalMs();
-    }
+  public String getBroadcastUri() {
+    return configuration.getBroadcastURI();
+  }
+
+  public long getWaitTime() {
+    return configuration.getMonitoringBroadcastIntervalMs();
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/test/java/org/apache/streams/jackson/MemoryUsageDeserializerTest.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/test/java/org/apache/streams/jackson/MemoryUsageDeserializerTest.java b/streams-monitoring/src/test/java/org/apache/streams/jackson/MemoryUsageDeserializerTest.java
index 1c68239..8bf3219 100644
--- a/streams-monitoring/src/test/java/org/apache/streams/jackson/MemoryUsageDeserializerTest.java
+++ b/streams-monitoring/src/test/java/org/apache/streams/jackson/MemoryUsageDeserializerTest.java
@@ -15,13 +15,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.jackson;
 
+import org.apache.streams.pojo.json.MemoryUsageBroadcast;
+
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.streams.pojo.json.MemoryUsageBroadcast;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -35,43 +37,46 @@ import static org.junit.Assert.assertNotNull;
 
 public class MemoryUsageDeserializerTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(MemoryUsageDeserializerTest.class);
-    private ObjectMapper objectMapper;
+  private static final Logger LOGGER = LoggerFactory.getLogger(MemoryUsageDeserializerTest.class);
+  private ObjectMapper objectMapper;
 
-    @Before
-    public void setup() {
-        objectMapper = StreamsJacksonMapper.getInstance();
-        SimpleModule simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(MemoryUsageBroadcast.class, new MemoryUsageDeserializer());
-        objectMapper.registerModule(simpleModule);
-        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-    }
+  /**
+   * setup.
+   */
+  @Before
+  public void setup() {
+    objectMapper = StreamsJacksonMapper.getInstance();
+    SimpleModule simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(MemoryUsageBroadcast.class, new MemoryUsageDeserializer());
+    objectMapper.registerModule(simpleModule);
+    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+  }
 
-    @Test
-    public void serDeTest() {
-        InputStream is = MemoryUsageDeserializerTest.class.getResourceAsStream("/MemoryUsageObjects.json");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
+  @Test
+  public void serDeTest() {
+    InputStream is = MemoryUsageDeserializerTest.class.getResourceAsStream("/MemoryUsageObjects.json");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
 
-        try {
-            while (br.ready()) {
-                String line = br.readLine();
-                if (!StringUtils.isEmpty(line)) {
-                    LOGGER.info("raw: {}", line);
-                    MemoryUsageBroadcast broadcast = objectMapper.readValue(line, MemoryUsageBroadcast.class);
+    try {
+      while (br.ready()) {
+        String line = br.readLine();
+        if (!StringUtils.isEmpty(line)) {
+          LOGGER.info("raw: {}", line);
+          MemoryUsageBroadcast broadcast = objectMapper.readValue(line, MemoryUsageBroadcast.class);
 
-                    LOGGER.info("activity: {}", broadcast);
+          LOGGER.info("activity: {}", broadcast);
 
-                    assertNotNull(broadcast);
-                    assertNotNull(broadcast.getVerbose());
-                    assertNotNull(broadcast.getObjectPendingFinalizationCount());
-                    assertNotNull(broadcast.getHeapMemoryUsage());
-                    assertNotNull(broadcast.getNonHeapMemoryUsage());
-                    assertNotNull(broadcast.getName());
-                }
-            }
-        } catch (Exception e) {
-            LOGGER.error("Exception while testing serializability: {}", e);
+          assertNotNull(broadcast);
+          assertNotNull(broadcast.getVerbose());
+          assertNotNull(broadcast.getObjectPendingFinalizationCount());
+          assertNotNull(broadcast.getHeapMemoryUsage());
+          assertNotNull(broadcast.getNonHeapMemoryUsage());
+          assertNotNull(broadcast.getName());
         }
+      }
+    } catch (Exception ex) {
+      LOGGER.error("Exception while testing serializability: {}", ex);
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/test/java/org/apache/streams/monitoring/persist/impl/BroadcastMessagePersisterTest.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/test/java/org/apache/streams/monitoring/persist/impl/BroadcastMessagePersisterTest.java b/streams-monitoring/src/test/java/org/apache/streams/monitoring/persist/impl/BroadcastMessagePersisterTest.java
index 6e7ff6d..fc2ff71 100644
--- a/streams-monitoring/src/test/java/org/apache/streams/monitoring/persist/impl/BroadcastMessagePersisterTest.java
+++ b/streams-monitoring/src/test/java/org/apache/streams/monitoring/persist/impl/BroadcastMessagePersisterTest.java
@@ -15,6 +15,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.monitoring.persist.impl;
 
 import com.google.common.collect.Lists;
@@ -28,33 +29,33 @@ import static org.junit.Assert.assertNotNull;
 
 public class BroadcastMessagePersisterTest {
 
-    @Test
-    public void testFailedPersist() {
-        BroadcastMessagePersister persister = new BroadcastMessagePersister("http://fake.url.com/fake_endpointasdfasdfas");
-
-        List<String> messages = Lists.newArrayList();
-        for(int x = 0; x < 10; x ++) {
-            messages.add("Fake_message #" + x);
-        }
-
-        int statusCode = persister.persistMessages(messages);
+  @Test
+  public void testFailedPersist() {
+    BroadcastMessagePersister persister = new BroadcastMessagePersister("http://fake.url.com/fake_endpointasdfasdfas");
 
-        assertNotNull(statusCode);
-        assertNotEquals(statusCode, 200);
+    List<String> messages = Lists.newArrayList();
+    for (int x = 0; x < 10; x++) {
+      messages.add("Fake_message #" + x);
     }
 
-    @Test
-    public void testInvalidURL() {
-        BroadcastMessagePersister persister = new BroadcastMessagePersister("h");
+    int statusCode = persister.persistMessages(messages);
 
-        List<String> messages = Lists.newArrayList();
-        for(int x = 0; x < 10; x ++) {
-            messages.add("Fake_message #" + x);
-        }
+    assertNotNull(statusCode);
+    assertNotEquals(statusCode, 200);
+  }
 
-        int statusCode = persister.persistMessages(messages);
+  @Test
+  public void testInvalidUrl() {
+    BroadcastMessagePersister persister = new BroadcastMessagePersister("h");
 
-        assertNotNull(statusCode);
-        assertEquals(statusCode, -1);
+    List<String> messages = Lists.newArrayList();
+    for (int x = 0; x < 10; x++) {
+      messages.add("Fake_message #" + x);
     }
+
+    int statusCode = persister.persistMessages(messages);
+
+    assertNotNull(statusCode);
+    assertEquals(statusCode, -1);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/test/java/org/apache/streams/monitoring/persist/impl/LogstashUdpMessagePersisterTest.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/test/java/org/apache/streams/monitoring/persist/impl/LogstashUdpMessagePersisterTest.java b/streams-monitoring/src/test/java/org/apache/streams/monitoring/persist/impl/LogstashUdpMessagePersisterTest.java
index faa99a2..3f9a4c1 100644
--- a/streams-monitoring/src/test/java/org/apache/streams/monitoring/persist/impl/LogstashUdpMessagePersisterTest.java
+++ b/streams-monitoring/src/test/java/org/apache/streams/monitoring/persist/impl/LogstashUdpMessagePersisterTest.java
@@ -15,6 +15,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.monitoring.persist.impl;
 
 import com.google.common.base.Splitter;
@@ -29,47 +30,51 @@ import java.net.DatagramSocket;
 import java.net.SocketException;
 import java.util.List;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 
 public class LogstashUdpMessagePersisterTest {
 
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(LogstashUdpMessagePersisterTest.class);
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(LogstashUdpMessagePersisterTest.class);
 
-    DatagramSocket socket = null;
+  DatagramSocket socket = null;
 
-    @Before
-    public void setup() {
-        try {
-            socket = new DatagramSocket(56789);
-        } catch (SocketException e) {
-            LOGGER.error("Metrics Broadcast Test Setup Failed: " + e.getMessage());
-        }
+  /**
+   * setup.
+   */
+  @Before
+  public void setup() {
+    try {
+      socket = new DatagramSocket(56789);
+    } catch (SocketException ex) {
+      LOGGER.error("Metrics Broadcast Test Setup Failed: " + ex.getMessage());
     }
+  }
 
 
-    @Test
-    public void testFailedPersist() {
-        LogstashUdpMessagePersister persister = new LogstashUdpMessagePersister("udp://127.0.0.1:56789");
-
-        List<String> messageArray = Lists.newArrayList();
-        for(int x = 0; x < 10; x ++) {
-            messageArray.add("Fake_message #" + x);
-        }
+  @Test
+  public void testFailedPersist() {
+    LogstashUdpMessagePersister persister = new LogstashUdpMessagePersister("udp://127.0.0.1:56789");
 
-        persister.persistMessages(messageArray);
-        byte[] receiveData = new byte[1024];
+    List<String> messageArray = Lists.newArrayList();
+    for (int x = 0; x < 10; x ++) {
+      messageArray.add("Fake_message #" + x);
+    }
 
-        DatagramPacket messageDatagram = new DatagramPacket(receiveData, receiveData.length);
+    persister.persistMessages(messageArray);
+    byte[] receiveData = new byte[1024];
 
-        try {
-            socket.receive(messageDatagram);
-            assertNotNull(messageDatagram);
-            List<String> messages = Lists.newArrayList(Splitter.on('\n').split(new String(messageDatagram.getData())));
-            assertEquals(messageArray, messages.subList(0,10));
-        } catch (IOException e) {
-            LOGGER.error("Metrics Broadcast Test Failed: " + e.getMessage());
-        }
+    DatagramPacket messageDatagram = new DatagramPacket(receiveData, receiveData.length);
 
+    try {
+      socket.receive(messageDatagram);
+      assertNotNull(messageDatagram);
+      List<String> messages = Lists.newArrayList(Splitter.on('\n').split(new String(messageDatagram.getData())));
+      assertEquals(messageArray, messages.subList(0,10));
+    } catch (IOException ex) {
+      LOGGER.error("Metrics Broadcast Test Failed: " + ex.getMessage());
     }
 
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/test/java/org/apache/streams/monitoring/tasks/BroadcastMonitorThreadTest.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/test/java/org/apache/streams/monitoring/tasks/BroadcastMonitorThreadTest.java b/streams-monitoring/src/test/java/org/apache/streams/monitoring/tasks/BroadcastMonitorThreadTest.java
index a959bd2..ad1bf05 100644
--- a/streams-monitoring/src/test/java/org/apache/streams/monitoring/tasks/BroadcastMonitorThreadTest.java
+++ b/streams-monitoring/src/test/java/org/apache/streams/monitoring/tasks/BroadcastMonitorThreadTest.java
@@ -15,67 +15,59 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.monitoring.tasks;
 
-import com.google.common.collect.Maps;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.local.monitoring.MonitoringConfiguration;
-import org.junit.Ignore;
+
 import org.junit.Test;
 
-import java.util.Map;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
 public class BroadcastMonitorThreadTest {
-    private ExecutorService executor;
 
-    @Test
-    public void testThreadEmptyBeanConfig() {
-        StreamsConfiguration streamsConfiguration = new StreamsConfiguration();
-        BroadcastMonitorThread thread = new BroadcastMonitorThread(streamsConfiguration);
-        testThread(thread);
-    }
+  private ExecutorService executor;
 
-    @Test
-    public void testThreadEmptyMapConfig() {
-        Map<String, Object> map = Maps.newHashMap();
-        BroadcastMonitorThread thread = new BroadcastMonitorThread(map);
-        testThread(thread);
-    }
+  @Test
+  public void testThreadEmptyBeanConfig() {
+    StreamsConfiguration streamsConfiguration = new StreamsConfiguration();
+    BroadcastMonitorThread thread = new BroadcastMonitorThread(streamsConfiguration);
+    testThread(thread);
+  }
 
-    @Test
-    public void testThreadFakeMapConfig() {
-        Map<String, Object> config = Maps.newHashMap();
-        config.put("broadcastURI", "http://fakeurl.com/fake");
-        BroadcastMonitorThread thread = new BroadcastMonitorThread(config);
-        testThread(thread);
-    }
 
-    @Test
-    public void testThreadStreamsConfig() {
 
-        StreamsConfiguration streams = new StreamsConfiguration();
-        MonitoringConfiguration monitoring = new MonitoringConfiguration();
-        monitoring.setBroadcastURI("http://fakeurl.com/fake");
-        monitoring.setMonitoringBroadcastIntervalMs(30000L);
-        streams.setAdditionalProperty("monitoring", monitoring);
-        BroadcastMonitorThread thread = new BroadcastMonitorThread(streams);
-        testThread(thread);
-    }
 
-    public void testThread(BroadcastMonitorThread thread) {
-        long testRunLength = thread.getWaitTime() * 1;
-        executor = Executors.newFixedThreadPool(1);
-        executor.submit(thread);
+  @Test
+  public void testThreadStreamsConfig() {
 
-        try {
-            Thread.sleep(testRunLength);
-        } catch(InterruptedException e) {
-            Thread.currentThread().interrupt();
-        }
+    StreamsConfiguration streams = new StreamsConfiguration();
+    MonitoringConfiguration monitoring = new MonitoringConfiguration();
+    monitoring.setBroadcastURI("http://fakeurl.com/fake");
+    monitoring.setMonitoringBroadcastIntervalMs(30000L);
+    streams.setAdditionalProperty("monitoring", monitoring);
+    BroadcastMonitorThread thread = new BroadcastMonitorThread(streams);
+    testThread(thread);
+  }
 
-        executor.shutdown();
+  /**
+   * Base Test.
+   * @param thread BroadcastMonitorThread
+   */
+  public void testThread(BroadcastMonitorThread thread) {
+    long testRunLength = thread.getWaitTime() * 1;
+    executor = Executors.newFixedThreadPool(1);
+    executor.submit(thread);
+
+    try {
+      Thread.sleep(testRunLength);
+    } catch (InterruptedException ex) {
+      Thread.currentThread().interrupt();
     }
 
+    executor.shutdown();
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraGenerationConfig.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraGenerationConfig.java b/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraGenerationConfig.java
index 964fff6..971b99f 100644
--- a/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraGenerationConfig.java
+++ b/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraGenerationConfig.java
@@ -16,9 +16,11 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.plugins.cassandra;
 
 import org.apache.streams.util.schema.GenerationConfig;
+
 import org.jsonschema2pojo.DefaultGenerationConfig;
 import org.jsonschema2pojo.util.URLUtil;
 
@@ -32,68 +34,71 @@ import java.util.List;
 import java.util.Set;
 
 /**
- * Configures StreamsHiveResourceGenerator
- *
- *
+ * Configures StreamsCassandraResourceGenerator.
  */
 public class StreamsCassandraGenerationConfig extends DefaultGenerationConfig implements GenerationConfig {
 
-    public String getSourceDirectory() {
-        return sourceDirectory;
-    }
+  public String getSourceDirectory() {
+    return sourceDirectory;
+  }
 
-    public List<String> getSourcePaths() {
-        return sourcePaths;
-    }
+  public List<String> getSourcePaths() {
+    return sourcePaths;
+  }
 
-    private String sourceDirectory;
-    private List<String> sourcePaths = new ArrayList<String>();
-    private String targetDirectory;
-    private int maxDepth = 1;
+  private String sourceDirectory;
+  private List<String> sourcePaths = new ArrayList<String>();
+  private String targetDirectory;
+  private int maxDepth = 1;
 
-    public Set<String> getExclusions() {
-        return exclusions;
-    }
+  public Set<String> getExclusions() {
+    return exclusions;
+  }
 
-    public void setExclusions(Set<String> exclusions) {
-        this.exclusions = exclusions;
-    }
+  public void setExclusions(Set<String> exclusions) {
+    this.exclusions = exclusions;
+  }
 
-    private Set<String> exclusions = new HashSet<String>();
+  private Set<String> exclusions = new HashSet<String>();
 
-    public int getMaxDepth() {
-        return maxDepth;
-    }
+  public int getMaxDepth() {
+    return maxDepth;
+  }
 
-    public void setSourceDirectory(String sourceDirectory) {
-        this.sourceDirectory = sourceDirectory;
-    }
+  public void setSourceDirectory(String sourceDirectory) {
+    this.sourceDirectory = sourceDirectory;
+  }
 
-    public void setSourcePaths(List<String> sourcePaths) {
-        this.sourcePaths = sourcePaths;
-    }
+  public void setSourcePaths(List<String> sourcePaths) {
+    this.sourcePaths = sourcePaths;
+  }
 
-    public void setTargetDirectory(String targetDirectory) {
-        this.targetDirectory = targetDirectory;
-    }
+  public void setTargetDirectory(String targetDirectory) {
+    this.targetDirectory = targetDirectory;
+  }
 
-    public File getTargetDirectory() {
-        return new File(targetDirectory);
-    }
+  public File getTargetDirectory() {
+    return new File(targetDirectory);
+  }
 
-    public Iterator<URL> getSource() {
-        if (null != sourceDirectory) {
-            return Collections.singleton(URLUtil.parseURL(sourceDirectory)).iterator();
-        }
-        List<URL> sourceURLs = new ArrayList<URL>();
-        if( sourcePaths != null && sourcePaths.size() > 0)
-            for (String source : sourcePaths) {
-                sourceURLs.add(URLUtil.parseURL(source));
-            }
-        return sourceURLs.iterator();
+  /**
+   * get all sources.
+   * @return Iterator of URL
+   */
+  public Iterator<URL> getSource() {
+    if (null != sourceDirectory) {
+      return Collections.singleton(URLUtil.parseURL(sourceDirectory)).iterator();
     }
-
-    public void setMaxDepth(int maxDepth) {
-        this.maxDepth = maxDepth;
+    List<URL> sourceUrls = new ArrayList<URL>();
+    if ( sourcePaths != null && sourcePaths.size() > 0) {
+      for (String source : sourcePaths) {
+        sourceUrls.add(URLUtil.parseURL(source));
+      }
     }
+    return sourceUrls.iterator();
+  }
+
+  public void setMaxDepth(int maxDepth) {
+    this.maxDepth = maxDepth;
+  }
 }


[21/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserActivityConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserActivityConverter.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserActivityConverter.java
index ef74371..b8ce79b 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserActivityConverter.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserActivityConverter.java
@@ -18,65 +18,69 @@
 
 package org.apache.streams.twitter.converter;
 
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.twitter.pojo.User;
 
+import com.google.common.collect.Lists;
+import org.apache.commons.lang.NotImplementedException;
+
 import java.util.List;
 
 import static org.apache.streams.twitter.converter.util.TwitterActivityUtil.updateActivity;
 
 public class TwitterJsonUserActivityConverter implements ActivityConverter<User> {
 
-    public static Class requiredClass = User.class;
+  public static Class requiredClass = User.class;
 
-    @Override
-    public Class requiredClass() {
-        return requiredClass;
-    }
+  @Override
+  public Class requiredClass() {
+    return requiredClass;
+  }
 
-    private static TwitterJsonUserActivityConverter instance = new TwitterJsonUserActivityConverter();
+  private static TwitterJsonUserActivityConverter instance = new TwitterJsonUserActivityConverter();
 
-    public static TwitterJsonUserActivityConverter getInstance() {
-        return instance;
-    }
+  public static TwitterJsonUserActivityConverter getInstance() {
+    return instance;
+  }
 
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
 
-    @Override
-    public User fromActivity(Activity deserialized) throws ActivityConversionException {
-        throw new NotImplementedException();
-    }
+  @Override
+  public User fromActivity(Activity deserialized) throws ActivityConversionException {
+    throw new NotImplementedException();
+  }
 
-    @Override
-    public List<Activity> toActivityList(User user) throws ActivityConversionException {
+  @Override
+  public List<User> fromActivityList(List<Activity> list) {
+    throw new NotImplementedException();
+  }
 
-        Activity activity = new Activity();
-        updateActivity(user, activity);
 
-        return Lists.newArrayList(activity);
-    }
+  @Override
+  public List<Activity> toActivityList(User user) throws ActivityConversionException {
 
-    @Override
-    public List<User> fromActivityList(List<Activity> list) {
-        throw new NotImplementedException();
-    }
+    Activity activity = new Activity();
+    updateActivity(user, activity);
+
+    return Lists.newArrayList(activity);
+  }
 
-    @Override
-    public List<Activity> toActivityList(List<User> serializedList) {
-        List<Activity> result = Lists.newArrayList();
-        for( User item : serializedList ) {
-            try {
-                List<Activity> activities = toActivityList(item);
-                result.addAll(activities);
-            } catch (ActivityConversionException e) {}
-        }
-        return result;
+  @Override
+  public List<Activity> toActivityList(List<User> serializedList) {
+    List<Activity> result = Lists.newArrayList();
+    for ( User item : serializedList ) {
+      try {
+        List<Activity> activities = toActivityList(item);
+        result.addAll(activities);
+      } catch (ActivityConversionException ex) {
+        //
+      }
     }
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserActivityObjectConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserActivityObjectConverter.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserActivityObjectConverter.java
index d62b1e8..7cb4158 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserActivityObjectConverter.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserActivityObjectConverter.java
@@ -18,47 +18,42 @@
 
 package org.apache.streams.twitter.converter;
 
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.NotImplementedException;
-import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.data.ActivityObjectConverter;
 import org.apache.streams.exceptions.ActivityConversionException;
-import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.twitter.pojo.User;
 
-import java.util.List;
+import org.apache.commons.lang.NotImplementedException;
 
 import static org.apache.streams.twitter.converter.util.TwitterActivityUtil.buildActor;
-import static org.apache.streams.twitter.converter.util.TwitterActivityUtil.updateActivity;
 
 public class TwitterJsonUserActivityObjectConverter implements ActivityObjectConverter<User> {
 
-    public static Class requiredClass = User.class;
+  public static Class requiredClass = User.class;
 
-    @Override
-    public Class requiredClass() {
-        return requiredClass;
-    }
+  @Override
+  public Class requiredClass() {
+    return requiredClass;
+  }
 
-    private static TwitterJsonUserActivityObjectConverter instance = new TwitterJsonUserActivityObjectConverter();
+  private static TwitterJsonUserActivityObjectConverter instance = new TwitterJsonUserActivityObjectConverter();
 
-    public static TwitterJsonUserActivityObjectConverter getInstance() {
-        return instance;
-    }
+  public static TwitterJsonUserActivityObjectConverter getInstance() {
+    return instance;
+  }
 
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
 
-    @Override
-    public User fromActivityObject(ActivityObject deserialized) throws ActivityConversionException {
-        throw new NotImplementedException();
-    }
+  @Override
+  public User fromActivityObject(ActivityObject deserialized) throws ActivityConversionException {
+    throw new NotImplementedException();
+  }
 
-    @Override
-    public ActivityObject toActivityObject(User serialized) throws ActivityConversionException {
-        return buildActor(serialized);
-    }
+  @Override
+  public ActivityObject toActivityObject(User serialized) throws ActivityConversionException {
+    return buildActor(serialized);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserstreameventActivityConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserstreameventActivityConverter.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserstreameventActivityConverter.java
index bb31fd6..6685a96 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserstreameventActivityConverter.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonUserstreameventActivityConverter.java
@@ -18,15 +18,16 @@
 
 package org.apache.streams.twitter.converter;
 
-import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.twitter.pojo.UserstreamEvent;
 
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
+import org.apache.commons.lang.NotImplementedException;
+
 import java.util.List;
 
 import static org.apache.streams.twitter.converter.util.TwitterActivityUtil.formatId;
@@ -34,87 +35,101 @@ import static org.apache.streams.twitter.converter.util.TwitterActivityUtil.getP
 
 
 /**
-* Created with IntelliJ IDEA.
-* User: mdelaet
-* Date: 9/30/13
-* Time: 9:24 AM
-* To change this template use File | Settings | File Templates.
-*/
+ * TwitterJsonUserstreameventActivityConverter.
+ */
+// TODO: Use this class explicitly somewhere
 public class TwitterJsonUserstreameventActivityConverter implements ActivityConverter<UserstreamEvent> {
 
-    public static Class requiredClass = UserstreamEvent.class;
-
-    @Override
-    public Class requiredClass() {
-        return requiredClass;
-    }
-
-    private static TwitterJsonUserstreameventActivityConverter instance = new TwitterJsonUserstreameventActivityConverter();
-
-    public static TwitterJsonUserstreameventActivityConverter getInstance() {
-        return instance;
-    }
-
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
-
-    @Override
-    public UserstreamEvent fromActivity(Activity deserialized) throws ActivityConversionException {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public List<Activity> toActivityList(UserstreamEvent userstreamEvent) throws ActivityConversionException {
-
-        Activity activity = convert(userstreamEvent);
-        return Lists.newArrayList(activity);
-
-    }
-
-    @Override
-    public List<UserstreamEvent> fromActivityList(List<Activity> list) {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public List<Activity> toActivityList(List<UserstreamEvent> serializedList) {
-        return null;
-    }
-
-    public Activity convert(UserstreamEvent event) throws ActivityConversionException {
-
-        Activity activity = new Activity();
-        activity.setActor(buildActor(event));
-        activity.setVerb(detectVerb(event));
-        activity.setObject(buildActivityObject(event));
-        activity.setId(formatId(activity.getVerb()));
-        if(Strings.isNullOrEmpty(activity.getId()))
-            throw new ActivityConversionException("Unable to determine activity id");
-        activity.setProvider(getProvider());
-        return activity;
-    }
-
-    public ActivityObject buildActor(UserstreamEvent event) {
-        ActivityObject actor = new ActivityObject();
-        //actor.setId(formatId(delete.getDelete().getStatus().getUserIdStr()));
-        return actor;
-    }
-
-    public ActivityObject buildActivityObject(UserstreamEvent event) {
-        ActivityObject actObj = new ActivityObject();
-        //actObj.setId(formatId(delete.getDelete().getStatus().getIdStr()));
-        //actObj.setObjectType("tweet");
-        return actObj;
-    }
-
-    public String detectVerb(UserstreamEvent event) {
-        return null;
-    }
-
-    public ActivityObject buildTarget(UserstreamEvent event) {
-        return null;
+  public static Class requiredClass = UserstreamEvent.class;
+
+  @Override
+  public Class requiredClass() {
+    return requiredClass;
+  }
+
+  private static TwitterJsonUserstreameventActivityConverter instance = new TwitterJsonUserstreameventActivityConverter();
+
+  public static TwitterJsonUserstreameventActivityConverter getInstance() {
+    return instance;
+  }
+
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
+
+  @Override
+  public UserstreamEvent fromActivity(Activity deserialized) throws ActivityConversionException {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public List<UserstreamEvent> fromActivityList(List<Activity> list) {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public List<Activity> toActivityList(UserstreamEvent userstreamEvent) throws ActivityConversionException {
+
+    Activity activity = convert(userstreamEvent);
+    return Lists.newArrayList(activity);
+
+  }
+
+  @Override
+  public List<Activity> toActivityList(List<UserstreamEvent> serializedList) {
+    return null;
+  }
+
+  /**
+   * convert UserstreamEvent to Activity.
+   * @param event UserstreamEvent
+   * @return Activity
+   * @throws ActivityConversionException ActivityConversionException
+   */
+  public Activity convert(UserstreamEvent event) throws ActivityConversionException {
+
+    Activity activity = new Activity();
+    activity.setActor(buildActor(event));
+    activity.setVerb(detectVerb(event));
+    activity.setObject(buildActivityObject(event));
+    activity.setId(formatId(activity.getVerb()));
+    if (Strings.isNullOrEmpty(activity.getId())) {
+      throw new ActivityConversionException("Unable to determine activity id");
     }
+    activity.setProvider(getProvider());
+    return activity;
+  }
+
+  /**
+   * Build ActivityObject from UserstreamEvent.
+   * @param event UserstreamEvent
+   * @return $.actor
+   */
+  public ActivityObject buildActor(UserstreamEvent event) {
+    ActivityObject actor = new ActivityObject();
+    //actor.setId(formatId(delete.getDelete().getStatus().getUserIdStr()));
+    return actor;
+  }
+
+  /**
+   * Build ActivityObject from UserstreamEvent.
+   * @param event UserstreamEvent
+   * @return $.object
+   */
+  public ActivityObject buildActivityObject(UserstreamEvent event) {
+    ActivityObject actObj = new ActivityObject();
+    //actObj.setId(formatId(delete.getDelete().getStatus().getIdStr()));
+    //actObj.setObjectType("tweet");
+    return actObj;
+  }
+
+  public String detectVerb(UserstreamEvent event) {
+    return null;
+  }
+
+  public ActivityObject buildTarget(UserstreamEvent event) {
+    return null;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/util/TwitterActivityUtil.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/util/TwitterActivityUtil.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/util/TwitterActivityUtil.java
index 4015514..e0e2e80 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/util/TwitterActivityUtil.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/util/TwitterActivityUtil.java
@@ -19,12 +19,6 @@
 
 package org.apache.streams.twitter.converter.util;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Joiner;
-import com.google.common.base.Optional;
-import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.extensions.ExtensionUtil;
@@ -41,6 +35,14 @@ import org.apache.streams.twitter.pojo.Retweet;
 import org.apache.streams.twitter.pojo.Tweet;
 import org.apache.streams.twitter.pojo.User;
 import org.apache.streams.twitter.pojo.UserMentions;
+import org.apache.streams.twitter.provider.TwitterErrorHandler;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Joiner;
+import com.google.common.base.Optional;
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -52,323 +54,342 @@ import java.util.Map;
 import static com.google.common.math.DoubleMath.mean;
 
 /**
- * Provides utilities for working with Activity objects within the context of Twitter
+ * Provides utilities for working with Activity objects within the context of Twitter.
  */
 public class TwitterActivityUtil {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(TwitterActivityUtil.class);
-
-    /**
-     * Updates the given Activity object with the values from the Tweet
-     * @param tweet the object to use as the source
-     * @param activity the target of the updates.  Will receive all values from the tweet.
-     * @throws org.apache.streams.exceptions.ActivityConversionException
-     */
-    public static void updateActivity(Tweet tweet, Activity activity) throws ActivityConversionException {
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-        activity.setActor(buildActor(tweet));
-        activity.setId(formatId(activity.getVerb(),
-                Optional.fromNullable(
-                        tweet.getIdStr())
-                        .or(Optional.of(tweet.getId().toString()))
-                        .orNull()));
-
-        if(tweet instanceof Retweet) {
-            updateActivityContent(activity,  ((Retweet) tweet).getRetweetedStatus(), "share");
-        } else {
-            updateActivityContent(activity, tweet, "post");
-        }
-
-        if(Strings.isNullOrEmpty(activity.getId()))
-            throw new ActivityConversionException("Unable to determine activity id");
-        try {
-            activity.setPublished(tweet.getCreatedAt());
-        } catch( Exception e ) {
-            throw new ActivityConversionException("Unable to determine publishedDate", e);
-        }
-        activity.setTarget(buildTarget(tweet));
-        activity.setProvider(getProvider());
-        activity.setUrl(String.format("http://twitter.com/%s/%s/%s", tweet.getUser().getScreenName(),"/status/",tweet.getIdStr()));
-
-        addTwitterExtension(activity, mapper.convertValue(tweet, ObjectNode.class));
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterActivityUtil.class);
+
+  static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+  /**
+   * Updates the given Activity object with the values from the Tweet.
+   * @param tweet the object to use as the source
+   * @param activity the target of the updates.  Will receive all values from the tweet.
+   * @throws ActivityConversionException ActivityConversionException
+   */
+  public static void updateActivity(Tweet tweet, Activity activity) throws ActivityConversionException {
+    activity.setActor(buildActor(tweet));
+    activity.setId(formatId(activity.getVerb(),
+        Optional.fromNullable(
+            tweet.getIdStr())
+            .or(Optional.of(tweet.getId().toString()))
+            .orNull()));
+
+    if (tweet instanceof Retweet) {
+      updateActivityContent(activity,  ((Retweet) tweet).getRetweetedStatus(), "share");
+    } else {
+      updateActivityContent(activity, tweet, "post");
     }
 
-    /**
-     * Updates the given Activity object with the values from the User
-     * @param user the object to use as the source
-     * @param activity the target of the updates.  Will receive all values from the tweet.
-     */
-    public static void updateActivity(User user, Activity activity) {
-        activity.setActor(buildActor(user));
-        activity.setId(null);
-        activity.setVerb(null);
+    if (Strings.isNullOrEmpty(activity.getId())) {
+      throw new ActivityConversionException("Unable to determine activity id");
     }
-
-    /**
-     * Updates the activity for a delete event
-     * @param delete the delete event
-     * @param activity the Activity object to update
-     * @throws org.apache.streams.exceptions.ActivityConversionException
-     */
-    public static void updateActivity(Delete delete, Activity activity) throws ActivityConversionException {
-        activity.setActor(buildActor(delete));
-        activity.setVerb("delete");
-        activity.setObject(buildActivityObject(delete));
-        activity.setId(formatId(activity.getVerb(), delete.getDelete().getStatus().getIdStr()));
-        if(Strings.isNullOrEmpty(activity.getId()))
-            throw new ActivityConversionException("Unable to determine activity id");
-        activity.setProvider(getProvider());
-        addTwitterExtension(activity, StreamsJacksonMapper.getInstance().convertValue(delete, ObjectNode.class));
+    try {
+      activity.setPublished(tweet.getCreatedAt());
+    } catch ( Exception ex ) {
+      throw new ActivityConversionException("Unable to determine publishedDate", ex);
     }
-
-    /**
-     * Builds the actor for a delete event
-     * @param delete the delete event
-     * @return a valid Actor
-     */
-    public static ActivityObject buildActor(Delete delete) {
-        ActivityObject actor = new ActivityObject();
-        actor.setId(formatId(delete.getDelete().getStatus().getUserIdStr()));
-        actor.setObjectType("page");
-        return actor;
+    activity.setTarget(buildTarget(tweet));
+    activity.setProvider(getProvider());
+    activity.setUrl(String.format("http://twitter.com/%s/%s/%s", tweet.getUser().getScreenName(),"/status/",tweet.getIdStr()));
+
+    addTwitterExtension(activity, mapper.convertValue(tweet, ObjectNode.class));
+  }
+
+  /**
+   * Updates the given Activity object with the values from the User
+   * @param user the object to use as the source
+   * @param activity the target of the updates.  Will receive all values from the tweet.
+   */
+  public static void updateActivity(User user, Activity activity) {
+    activity.setActor(buildActor(user));
+    activity.setId(null);
+    activity.setVerb(null);
+  }
+
+  /**
+   * Updates the activity for a delete event.
+   * @param delete the delete event
+   * @param activity the Activity object to update
+   * @throws ActivityConversionException ActivityConversionException
+   */
+  public static void updateActivity(Delete delete, Activity activity) throws ActivityConversionException {
+    activity.setActor(buildActor(delete));
+    activity.setVerb("delete");
+    activity.setObject(buildActivityObject(delete));
+    activity.setId(formatId(activity.getVerb(), delete.getDelete().getStatus().getIdStr()));
+    if (Strings.isNullOrEmpty(activity.getId())) {
+      throw new ActivityConversionException("Unable to determine activity id");
     }
-
-    /**
-     * Builds the ActivityObject for the delete event
-     * @param delete the delete event
-     * @return a valid Activity Object
-     */
-    public static ActivityObject buildActivityObject(Delete delete) {
-        ActivityObject actObj = new ActivityObject();
-        actObj.setId(formatId(delete.getDelete().getStatus().getIdStr()));
-        actObj.setObjectType("tweet");
-        return actObj;
+    activity.setProvider(getProvider());
+    addTwitterExtension(activity, StreamsJacksonMapper.getInstance().convertValue(delete, ObjectNode.class));
+  }
+
+  /**
+   * Builds the activity {@link org.apache.streams.pojo.json.ActivityObject} actor from the tweet
+   * @param tweet the object to use as the source
+   * @return a valid Actor populated from the Tweet
+   */
+  public static ActivityObject buildActor(Tweet tweet) {
+    ActivityObject actor = new ActivityObject();
+    User user = tweet.getUser();
+
+    return buildActor(user);
+  }
+
+  /**
+   * Builds the activity {@link org.apache.streams.pojo.json.ActivityObject} actor from the User
+   * @param user the object to use as the source
+   * @return a valid Actor populated from the Tweet
+   */
+  public static ActivityObject buildActor(User user) {
+    ActivityObject actor = new ActivityObject();
+    actor.setId(formatId(
+        Optional.fromNullable(
+            user.getIdStr())
+            .or(Optional.of(user.getId().toString()))
+            .orNull()
+    ));
+    actor.setObjectType("page");
+    actor.setDisplayName(user.getName());
+    actor.setAdditionalProperty("handle", user.getScreenName());
+    actor.setSummary(user.getDescription());
+
+    if (user.getUrl() != null) {
+      actor.setUrl(user.getUrl());
     }
 
-
-    /**
-     * Updates the content, and associated fields, with those from the given tweet
-     * @param activity the target of the updates.  Will receive all values from the tweet.
-     * @param tweet the object to use as the source
-     * @param verb the verb for the given activity's type
-     */
-    public static void updateActivityContent(Activity activity, Tweet tweet, String verb) {
-        activity.setVerb(verb);
-        activity.setTitle("");
-        if( tweet != null ) {
-            activity.setObject(buildActivityObject(tweet));
-            activity.setLinks(getLinks(tweet));
-            activity.setContent(tweet.getText());
-            addLocationExtension(activity, tweet);
-            addTwitterExtensions(activity, tweet);
-        }
+    Map<String, Object> extensions = new HashMap<>();
+    extensions.put("location", user.getLocation());
+    extensions.put("posts", user.getStatusesCount());
+    extensions.put("favorites", user.getFavouritesCount());
+    extensions.put("followers", user.getFollowersCount());
+
+    Image profileImage = new Image();
+    profileImage.setUrl(user.getProfileImageUrlHttps());
+    actor.setImage(profileImage);
+
+    extensions.put("screenName", user.getScreenName());
+
+    actor.setAdditionalProperty("extensions", extensions);
+    return actor;
+  }
+
+  /**
+   * Builds the actor for a delete event.
+   * @param delete the delete event
+   * @return a valid Actor
+   */
+  public static ActivityObject buildActor(Delete delete) {
+    ActivityObject actor = new ActivityObject();
+    actor.setId(formatId(delete.getDelete().getStatus().getUserIdStr()));
+    actor.setObjectType("page");
+    return actor;
+  }
+
+  /**
+   * Creates an {@link org.apache.streams.pojo.json.ActivityObject} for the tweet
+   * @param tweet the object to use as the source
+   * @return a valid ActivityObject
+   */
+  public static ActivityObject buildActivityObject(Tweet tweet) {
+    ActivityObject actObj = new ActivityObject();
+    String id =  Optional.fromNullable(
+        tweet.getIdStr())
+        .or(Optional.of(tweet.getId().toString()))
+        .orNull();
+    if ( id != null ) {
+      actObj.setId(id);
     }
-
-    /**
-     * Creates an {@link org.apache.streams.pojo.json.ActivityObject} for the tweet
-     * @param tweet the object to use as the source
-     * @return a valid ActivityObject
-     */
-    public static ActivityObject buildActivityObject(Tweet tweet) {
-        ActivityObject actObj = new ActivityObject();
-        String id =  Optional.fromNullable(
-                tweet.getIdStr())
-                .or(Optional.of(tweet.getId().toString()))
-                .orNull();
-        if( id != null )
-            actObj.setId(id);
-        actObj.setObjectType("post");
-        actObj.setContent(tweet.getText());
-        return actObj;
+    actObj.setObjectType("post");
+    actObj.setContent(tweet.getText());
+    return actObj;
+  }
+
+  /**
+   * Builds the ActivityObject for the delete event.
+   * @param delete the delete event
+   * @return a valid Activity Object
+   */
+  public static ActivityObject buildActivityObject(Delete delete) {
+    ActivityObject actObj = new ActivityObject();
+    actObj.setId(formatId(delete.getDelete().getStatus().getIdStr()));
+    actObj.setObjectType("tweet");
+    return actObj;
+  }
+
+  /**
+   * Updates the content, and associated fields, with those from the given tweet
+   * @param activity the target of the updates.  Will receive all values from the tweet.
+   * @param tweet the object to use as the source
+   * @param verb the verb for the given activity's type
+   */
+  public static void updateActivityContent(Activity activity, Tweet tweet, String verb) {
+    activity.setVerb(verb);
+    activity.setTitle("");
+    if ( tweet != null ) {
+      activity.setObject(buildActivityObject(tweet));
+      activity.setLinks(getLinks(tweet));
+      activity.setContent(tweet.getText());
+      addLocationExtension(activity, tweet);
+      addTwitterExtensions(activity, tweet);
     }
+  }
 
-    /**
-     * Builds the activity {@link org.apache.streams.pojo.json.ActivityObject} actor from the tweet
-     * @param tweet the object to use as the source
-     * @return a valid Actor populated from the Tweet
-     */
-    public static ActivityObject buildActor(Tweet tweet) {
-        ActivityObject actor = new ActivityObject();
-        User user = tweet.getUser();
 
-        return buildActor(user);
-    }
 
-    /**
-     * Builds the activity {@link org.apache.streams.pojo.json.ActivityObject} actor from the User
-     * @param user the object to use as the source
-     * @return a valid Actor populated from the Tweet
-     */
-    public static ActivityObject buildActor(User user) {
-        ActivityObject actor = new ActivityObject();
-        actor.setId(formatId(
-                Optional.fromNullable(
-                        user.getIdStr())
-                        .or(Optional.of(user.getId().toString()))
-                        .orNull()
-        ));
-        actor.setObjectType("page");
-        actor.setDisplayName(user.getName());
-        actor.setAdditionalProperty("handle", user.getScreenName());
-        actor.setSummary(user.getDescription());
-
-        if (user.getUrl()!=null){
-            actor.setUrl(user.getUrl());
-        }
-
-        Map<String, Object> extensions = new HashMap<>();
-        extensions.put("location", user.getLocation());
-        extensions.put("posts", user.getStatusesCount());
-        extensions.put("favorites", user.getFavouritesCount());
-        extensions.put("followers", user.getFollowersCount());
-
-        Image profileImage = new Image();
-        profileImage.setUrl(user.getProfileImageUrlHttps());
-        actor.setImage(profileImage);
-
-        extensions.put("screenName", user.getScreenName());
-
-        actor.setAdditionalProperty("extensions", extensions);
-        return actor;
-    }
 
-    /**
-     * Gets the links from the Twitter event
-     * @param tweet the object to use as the source
-     * @return a list of links corresponding to the expanded URL (no t.co)
-     */
-    public static List<String> getLinks(Tweet tweet) {
-        List<String> links = new ArrayList<>();
-        if( tweet.getEntities().getUrls() != null ) {
-            for (Url url : tweet.getEntities().getUrls()) {
-                links.add(url.getExpandedUrl());
-            }
-        }
-        else
-            LOGGER.debug("  0 links");
-        return links;
-    }
 
-    /**
-     * Builds the {@link org.apache.streams.twitter.pojo.TargetObject} from the tweet
-     * @param tweet the object to use as the source
-     * @return currently returns null for all activities
-     */
-    public static ActivityObject buildTarget(Tweet tweet) {
-        return null;
-    }
 
-    /**
-     * Adds the location extension and populates with teh twitter data
-     * @param activity the Activity object to update
-     * @param tweet the object to use as the source
-     */
-    public static void addLocationExtension(Activity activity, Tweet tweet) {
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
-        Map<String, Object> location = new HashMap<>();
-        location.put("id", formatId(
-                Optional.fromNullable(
-                        tweet.getIdStr())
-                        .or(Optional.of(tweet.getId().toString()))
-                        .orNull()
-        ));
-        location.put("coordinates", boundingBoxCenter(tweet.getPlace()));       
-        extensions.put("location", location);
-    }
 
-    /**
-     * Gets the common twitter {@link org.apache.streams.pojo.json.Provider} object
-     * @return a provider object representing Twitter
-     */
-    public static Provider getProvider() {
-        Provider provider = new Provider();
-        provider.setId("id:providers:twitter");
-        provider.setObjectType("application");
-        provider.setDisplayName("Twitter");
-        return provider;
+  /**
+   * Gets the links from the Twitter event
+   * @param tweet the object to use as the source
+   * @return a list of links corresponding to the expanded URL (no t.co)
+   */
+  public static List<String> getLinks(Tweet tweet) {
+    List<String> links = new ArrayList<>();
+    if ( tweet.getEntities().getUrls() != null ) {
+      for (Url url : tweet.getEntities().getUrls()) {
+        links.add(url.getExpandedUrl());
+      }
+    } else {
+      LOGGER.debug(" 0 links");
     }
-    /**
-     * Adds the given Twitter event to the activity as an extension
-     * @param activity the Activity object to update
-     * @param event the Twitter event to add as the extension
-     */
-    public static void addTwitterExtension(Activity activity, ObjectNode event) {
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
-        extensions.put("twitter", event);
+    return links;
+  }
+
+  /**
+   * Builds the {@link org.apache.streams.twitter.pojo.TargetObject} from the tweet.
+   * @param tweet the object to use as the source
+   * @return currently returns null for all activities
+   */
+  public static ActivityObject buildTarget(Tweet tweet) {
+    return null;
+  }
+
+  /**
+   * Adds the location extension and populates with teh twitter data.
+   * @param activity the Activity object to update
+   * @param tweet the object to use as the source
+   */
+  public static void addLocationExtension(Activity activity, Tweet tweet) {
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
+    Map<String, Object> location = new HashMap<>();
+    location.put("id", formatId(
+        Optional.fromNullable(
+            tweet.getIdStr())
+            .or(Optional.of(tweet.getId().toString()))
+            .orNull()
+    ));
+    location.put("coordinates", boundingBoxCenter(tweet.getPlace()));
+    extensions.put("location", location);
+  }
+
+  /**
+   * Gets the common twitter {@link org.apache.streams.pojo.json.Provider} object
+   * @return a provider object representing Twitter
+   */
+  public static Provider getProvider() {
+    Provider provider = new Provider();
+    provider.setId("id:providers:twitter");
+    provider.setObjectType("application");
+    provider.setDisplayName("Twitter");
+    return provider;
+  }
+
+  /**
+   * Adds the given Twitter event to the activity as an extension.
+   * @param activity the Activity object to update
+   * @param event the Twitter event to add as the extension
+   */
+  public static void addTwitterExtension(Activity activity, ObjectNode event) {
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
+    extensions.put("twitter", event);
+  }
+
+  /**
+   * Formats the ID to conform with the Apache Streams activity ID convention.
+   * @param idparts the parts of the ID to join
+   * @return a valid Activity ID in format "id:twitter:part1:part2:...partN"
+   */
+  public static String formatId(String... idparts) {
+    return Joiner.on(":").join(Lists.asList("id:twitter", idparts));
+  }
+
+  /**
+   * Takes various parameters from the twitter object that are currently not part of the
+   * activity schema and stores them in a generic extensions attribute.
+   * @param activity Activity
+   * @param tweet Tweet
+   */
+  public static void addTwitterExtensions(Activity activity, Tweet tweet) {
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
+
+    List<String> hashtags = new ArrayList<>();
+    for (Hashtag hashtag : tweet.getEntities().getHashtags()) {
+      hashtags.add(hashtag.getText());
     }
-    /**
-     * Formats the ID to conform with the Apache Streams activity ID convention
-     * @param idparts the parts of the ID to join
-     * @return a valid Activity ID in format "id:twitter:part1:part2:...partN"
-     */
-    public static String formatId(String... idparts) {
-        return Joiner.on(":").join(Lists.asList("id:twitter", idparts));
-    }
-
-    /**
-     * Takes various parameters from the twitter object that are currently not part of teh
-     * activity schema and stores them in a generic extensions attribute
-     * @param activity
-     * @param tweet
-     */
-    public static void addTwitterExtensions(Activity activity, Tweet tweet) {
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
+    extensions.put("hashtags", hashtags);
 
-        List<String> hashtags = new ArrayList<>();
-        for(Hashtag hashtag : tweet.getEntities().getHashtags()) {
-            hashtags.add(hashtag.getText());
-        }
-        extensions.put("hashtags", hashtags);
+    Map<String, Object> likes = new HashMap<>();
+    likes.put("perspectival", tweet.getFavorited());
+    likes.put("count", tweet.getAdditionalProperties().get("favorite_count"));
 
-        Map<String, Object> likes = new HashMap<>();
-        likes.put("perspectival", tweet.getFavorited());
-        likes.put("count", tweet.getAdditionalProperties().get("favorite_count"));
+    extensions.put("likes", likes);
 
-        extensions.put("likes", likes);
+    Map<String, Object> rebroadcasts = new HashMap<>();
+    rebroadcasts.put("perspectival", tweet.getRetweeted());
+    rebroadcasts.put("count", tweet.getRetweetCount());
 
-        Map<String, Object> rebroadcasts = new HashMap<>();
-        rebroadcasts.put("perspectival", tweet.getRetweeted());
-        rebroadcasts.put("count", tweet.getRetweetCount());
+    extensions.put("rebroadcasts", rebroadcasts);
 
-        extensions.put("rebroadcasts", rebroadcasts);
+    List<Map<String, Object>> userMentions = new ArrayList<>();
+    Entities entities = tweet.getEntities();
 
-        List<Map<String, Object>> userMentions = new ArrayList<>();
-        Entities entities = tweet.getEntities();
+    for (UserMentions user : entities.getUserMentions()) {
+      //Map the twitter user object into an actor
+      Map<String, Object> actor = new HashMap<>();
+      actor.put("id", "id:twitter:" + user.getIdStr());
+      actor.put("displayName", user.getName());
+      actor.put("handle", user.getScreenName());
 
-        for(UserMentions user : entities.getUserMentions()) {
-            //Map the twitter user object into an actor
-            Map<String, Object> actor = new HashMap<>();
-            actor.put("id", "id:twitter:" + user.getIdStr());
-            actor.put("displayName", user.getName());
-            actor.put("handle", user.getScreenName());
+      userMentions.add(actor);
+    }
 
-            userMentions.add(actor);
-        }
+    extensions.put("user_mentions", userMentions);
 
-        extensions.put("user_mentions", userMentions);
+    extensions.put("keywords", tweet.getText());
+  }
 
-        extensions.put("keywords", tweet.getText());
+  /**
+   * Compute central coordinates from bounding box.
+   * @param place the bounding box to use as the source
+   */
+  public static List<Double> boundingBoxCenter(Place place) {
+    if ( place == null ) {
+      return new ArrayList<>();
     }
-
-    /**
-     * Compute central coordinates from bounding box
-     * @param place the bounding box to use as the source
-     */
-    public static List<Double> boundingBoxCenter(Place place) {
-        if( place == null ) return new ArrayList<>();
-        if( place.getBoundingBox() == null ) return new ArrayList<>();
-        if( place.getBoundingBox().getCoordinates().size() != 1 ) return new ArrayList<>();
-        if( place.getBoundingBox().getCoordinates().get(0).size() != 4 ) return new ArrayList<>();
-        List<Double> lats = new ArrayList<>();
-        List<Double> lons = new ArrayList<>();
-        for( List<Double> point : place.getBoundingBox().getCoordinates().get(0)) {
-            lats.add(point.get(0));
-            lons.add(point.get(1));
-        }
-        List<Double> result = new ArrayList<>();
-        result.add(mean(lats));
-        result.add(mean(lons));
-        return result;
+    if ( place.getBoundingBox() == null ) {
+      return new ArrayList<>();
+    }
+    if ( place.getBoundingBox().getCoordinates().size() != 1 ) {
+      return new ArrayList<>();
+    }
+    if ( place.getBoundingBox().getCoordinates().get(0).size() != 4 ) {
+      return new ArrayList<>();
+    }
+    List<Double> lats = new ArrayList<>();
+    List<Double> lons = new ArrayList<>();
+    for ( List<Double> point : place.getBoundingBox().getCoordinates().get(0)) {
+      lats.add(point.get(0));
+      lons.add(point.get(1));
     }
+    List<Double> result = new ArrayList<>();
+    result.add(mean(lats));
+    result.add(mean(lons));
+    return result;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/FetchAndReplaceTwitterProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/FetchAndReplaceTwitterProcessor.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/FetchAndReplaceTwitterProcessor.java
index 046cb76..c1c205b 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/FetchAndReplaceTwitterProcessor.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/FetchAndReplaceTwitterProcessor.java
@@ -19,9 +19,6 @@
 
 package org.apache.streams.twitter.processor;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
-import java.util.List;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
@@ -31,11 +28,14 @@ import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.twitter.TwitterConfiguration;
 import org.apache.streams.twitter.TwitterStreamConfiguration;
+import org.apache.streams.twitter.converter.TwitterDocumentClassifier;
 import org.apache.streams.twitter.pojo.Delete;
 import org.apache.streams.twitter.pojo.Retweet;
 import org.apache.streams.twitter.pojo.Tweet;
-import org.apache.streams.twitter.provider.TwitterEventClassifier;
 import org.apache.streams.twitter.provider.TwitterProviderUtil;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Lists;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import twitter4j.Status;
@@ -45,6 +45,8 @@ import twitter4j.TwitterFactory;
 import twitter4j.TwitterObjectFactory;
 import twitter4j.conf.ConfigurationBuilder;
 
+import java.util.List;
+
 import static org.apache.streams.twitter.converter.util.TwitterActivityUtil.getProvider;
 import static org.apache.streams.twitter.converter.util.TwitterActivityUtil.updateActivity;
 
@@ -54,132 +56,132 @@ import static org.apache.streams.twitter.converter.util.TwitterActivityUtil.upda
  */
 public class FetchAndReplaceTwitterProcessor implements StreamsProcessor {
 
-    private static final String PROVIDER_ID = getProvider().getId();
-    private static final Logger LOGGER = LoggerFactory.getLogger(FetchAndReplaceTwitterProcessor.class);
-
-    //Default number of attempts before allowing the document through
-    private static final int MAX_ATTEMPTS = 5;
-    //Start the backoff at 4 minutes.  This results in a wait period of 4, 8, 12, 16 & 20 min with an attempt of 5
-    public static final int BACKOFF = 1000 * 60 * 4;
-
-    private final TwitterConfiguration config;
-    private Twitter client;
-    private ObjectMapper mapper;
-    private int retryCount;
-
-    public FetchAndReplaceTwitterProcessor() {
-        this(new ComponentConfigurator<>(TwitterStreamConfiguration.class).detectConfiguration(StreamsConfigurator.config, "twitter"));
-    }
-
-    public FetchAndReplaceTwitterProcessor(TwitterStreamConfiguration config) {
-        this.config = config;
+  private static final String PROVIDER_ID = getProvider().getId();
+  private static final Logger LOGGER = LoggerFactory.getLogger(FetchAndReplaceTwitterProcessor.class);
+
+  //Default number of attempts before allowing the document through
+  private static final int MAX_ATTEMPTS = 5;
+  //Start the backoff at 4 minutes.  This results in a wait period of 4, 8, 12, 16 & 20 min with an attempt of 5
+  public static final int BACKOFF = 1000 * 60 * 4;
+
+  private final TwitterConfiguration config;
+  private Twitter client;
+  private ObjectMapper mapper;
+  private int retryCount;
+
+  public FetchAndReplaceTwitterProcessor() {
+    this(new ComponentConfigurator<>(TwitterStreamConfiguration.class).detectConfiguration(StreamsConfigurator.config, "twitter"));
+  }
+
+  public FetchAndReplaceTwitterProcessor(TwitterStreamConfiguration config) {
+    this.config = config;
+  }
+
+  @Override
+  public String getId() {
+    return getProvider().getId();
+  }
+
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    if (entry.getDocument() instanceof Activity) {
+      Activity doc = (Activity)entry.getDocument();
+      String originalId = doc.getId();
+      if (PROVIDER_ID.equals(doc.getProvider().getId())) {
+        fetchAndReplace(doc, originalId);
+      }
+    } else {
+      throw new IllegalStateException("Requires an activity document");
     }
-
-    @Override
-    public String getId() {
-        return getProvider().getId();
+    return Lists.newArrayList(entry);
+  }
+
+
+  @Override
+  public void prepare(Object configurationObject) {
+    this.client = getTwitterClient();
+    this.mapper = StreamsJacksonMapper.getInstance();
+  }
+
+  @Override
+  public void cleanUp() {
+
+  }
+
+  protected void fetchAndReplace(Activity doc, String originalId) {
+    try {
+      String json = fetch(doc);
+      replace(doc, json);
+      doc.setId(originalId);
+      retryCount = 0;
+    } catch (TwitterException tw) {
+      if (tw.exceededRateLimitation()) {
+        sleepAndTryAgain(doc, originalId);
+      }
+    } catch (Exception ex) {
+      LOGGER.warn("Error fetching and replacing tweet for activity {}", doc.getId());
     }
-
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        if(entry.getDocument() instanceof Activity) {
-            Activity doc = (Activity)entry.getDocument();
-            String originalId = doc.getId();
-            if(PROVIDER_ID.equals(doc.getProvider().getId())) {
-                fetchAndReplace(doc, originalId);
-            }
-        } else {
-            throw new IllegalStateException("Requires an activity document");
-        }
-        return Lists.newArrayList(entry);
+  }
+
+  protected void replace(Activity doc, String json) throws java.io.IOException, ActivityConversionException {
+    Class documentSubType = new TwitterDocumentClassifier().detectClasses(json).get(0);
+    Object object = mapper.readValue(json, documentSubType);
+
+    if (documentSubType.equals(Retweet.class) || documentSubType.equals(Tweet.class)) {
+      updateActivity((Tweet)object, doc);
+    } else if (documentSubType.equals(Delete.class)) {
+      updateActivity((Delete)object, doc);
+    } else {
+      LOGGER.info("Could not determine the correct update method for {}", documentSubType);
     }
+  }
 
+  protected String fetch(Activity doc) throws TwitterException {
+    String id = doc.getObject().getId();
+    LOGGER.debug("Fetching status from Twitter for {}", id);
+    Long tweetId = Long.valueOf(id.replace("id:twitter:tweets:", ""));
+    Status status = getTwitterClient().showStatus(tweetId);
+    return TwitterObjectFactory.getRawJSON(status);
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        this.client = getTwitterClient();
-        this.mapper = StreamsJacksonMapper.getInstance();
-    }
 
-    @Override
-    public void cleanUp() {
+  protected Twitter getTwitterClient() {
 
-    }
+    if (this.client == null) {
 
-    protected void fetchAndReplace(Activity doc, String originalId) {
-        try {
-            String json = fetch(doc);
-            replace(doc, json);
-            doc.setId(originalId);
-            retryCount = 0;
-        } catch(TwitterException tw) {
-            if(tw.exceededRateLimitation()) {
-                sleepAndTryAgain(doc, originalId);
-            }
-        } catch (Exception e) {
-            LOGGER.warn("Error fetching and replacing tweet for activity {}", doc.getId());
-        }
-    }
+      String baseUrl = TwitterProviderUtil.baseUrl(config);
 
-    protected void replace(Activity doc, String json) throws java.io.IOException, ActivityConversionException {
-        Class documentSubType = TwitterEventClassifier.detectClass(json);
-        Object object = mapper.readValue(json, documentSubType);
-
-        if(documentSubType.equals(Retweet.class) || documentSubType.equals(Tweet.class)) {
-            updateActivity((Tweet)object, doc);
-        } else if(documentSubType.equals(Delete.class)) {
-            updateActivity((Delete)object, doc);
-        } else {
-            LOGGER.info("Could not determine the correct update method for {}", documentSubType);
-        }
-    }
+      ConfigurationBuilder builder = new ConfigurationBuilder()
+          .setOAuthConsumerKey(config.getOauth().getConsumerKey())
+          .setOAuthConsumerSecret(config.getOauth().getConsumerSecret())
+          .setOAuthAccessToken(config.getOauth().getAccessToken())
+          .setOAuthAccessTokenSecret(config.getOauth().getAccessTokenSecret())
+          .setIncludeEntitiesEnabled(true)
+          .setJSONStoreEnabled(true)
+          .setAsyncNumThreads(1)
+          .setRestBaseURL(baseUrl)
+          .setIncludeMyRetweetEnabled(Boolean.TRUE)
+          .setPrettyDebugEnabled(Boolean.TRUE);
 
-    protected String fetch(Activity doc) throws TwitterException {
-        String id = doc.getObject().getId();
-        LOGGER.debug("Fetching status from Twitter for {}", id);
-        Long tweetId = Long.valueOf(id.replace("id:twitter:tweets:", ""));
-        Status status = getTwitterClient().showStatus(tweetId);
-        return TwitterObjectFactory.getRawJSON(status);
+      this.client = new TwitterFactory(builder.build()).getInstance();
     }
-
-
-    protected Twitter getTwitterClient()
-    {
-        if(this.client == null) {
-
-            String baseUrl = TwitterProviderUtil.baseUrl(config);
-
-            ConfigurationBuilder builder = new ConfigurationBuilder()
-                    .setOAuthConsumerKey(config.getOauth().getConsumerKey())
-                    .setOAuthConsumerSecret(config.getOauth().getConsumerSecret())
-                    .setOAuthAccessToken(config.getOauth().getAccessToken())
-                    .setOAuthAccessTokenSecret(config.getOauth().getAccessTokenSecret())
-                    .setIncludeEntitiesEnabled(true)
-                    .setJSONStoreEnabled(true)
-                    .setAsyncNumThreads(1)
-                    .setRestBaseURL(baseUrl)
-                    .setIncludeMyRetweetEnabled(Boolean.TRUE)
-                    .setPrettyDebugEnabled(Boolean.TRUE);
-
-            this.client = new TwitterFactory(builder.build()).getInstance();
-        }
-        return this.client;
-    }
-
-    //Hardcore sleep to allow for catch up
-    protected void sleepAndTryAgain(Activity doc, String originalId) {
-        try {
-            //Attempt to fetchAndReplace with a backoff up to the limit then just reset the count and let the process continue
-            if(retryCount < MAX_ATTEMPTS) {
-                retryCount++;
-                LOGGER.debug("Sleeping for {} min due to excessive calls to Twitter API", (retryCount * 4));
-                Thread.sleep(BACKOFF * retryCount);
-                fetchAndReplace(doc, originalId);
-            } else {
-                retryCount = 0;
-            }
-        } catch (InterruptedException e) {
-            LOGGER.warn("Thread sleep interrupted while waiting for twitter backoff");
-        }
+    return this.client;
+  }
+
+  //Hardcore sleep to allow for catch up
+  protected void sleepAndTryAgain(Activity doc, String originalId) {
+    try {
+      //Attempt to fetchAndReplace with a backoff up to the limit then just reset the count and let the process continue
+      if (retryCount < MAX_ATTEMPTS) {
+        retryCount++;
+        LOGGER.debug("Sleeping for {} min due to excessive calls to Twitter API", (retryCount * 4));
+        Thread.sleep(BACKOFF * retryCount);
+        fetchAndReplace(doc, originalId);
+      } else {
+        retryCount = 0;
+      }
+    } catch (InterruptedException ex) {
+      LOGGER.warn("Thread sleep interrupted while waiting for twitter backoff");
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterEventProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterEventProcessor.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterEventProcessor.java
deleted file mode 100644
index ed6b90a..0000000
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterEventProcessor.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.twitter.processor;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.twitter.converter.StreamsTwitterMapper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.List;
-
-/**
- * This class performs conversion of a twitter event to a specified outClass
- *
- * Deprecated: use TypeConverterProcessor and ActivityConverterProcessor instead
- */
-@Deprecated
-public class TwitterEventProcessor implements StreamsProcessor {
-
-    private final static String STREAMS_ID = "TwitterEventProcessor";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(TwitterEventProcessor.class);
-
-    private ObjectMapper mapper = new StreamsTwitterMapper();
-
-    private Class inClass;
-    private Class outClass;
-
-    public TwitterEventProcessor(Class inClass, Class outClass) {
-        this.inClass = inClass;
-        this.outClass = outClass;
-    }
-
-    public TwitterEventProcessor( Class outClass) {
-        this(null, outClass);
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-
-        LOGGER.error("You are calling a deprecated / defunct class.  Modify your stream to use ActivityConverterProcessor.");
-
-        LOGGER.debug("CONVERT FAILED");
-
-        return Lists.newArrayList();
-
-    }
-
-    @Override
-    public void prepare(Object configurationObject) {
-        mapper = StreamsJacksonMapper.getInstance();
-    }
-
-    @Override
-    public void cleanUp() {
-
-    }
-};

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterProfileProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterProfileProcessor.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterProfileProcessor.java
deleted file mode 100644
index d49a54f..0000000
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterProfileProcessor.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.twitter.processor;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.twitter.converter.TwitterDateTimeFormat;
-import org.apache.streams.twitter.pojo.Retweet;
-import org.apache.streams.twitter.pojo.Tweet;
-import org.apache.streams.twitter.pojo.User;
-import org.apache.streams.twitter.provider.TwitterEventClassifier;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Queue;
-import java.util.Random;
-
-public class TwitterProfileProcessor implements StreamsProcessor, Runnable {
-
-    private final static String STREAMS_ID = "TwitterProfileProcessor";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(TwitterProfileProcessor.class);
-
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance(TwitterDateTimeFormat.TWITTER_FORMAT);
-
-    private Queue<StreamsDatum> inQueue;
-    private Queue<StreamsDatum> outQueue;
-
-    private final static String TERMINATE = "TERMINATE";
-
-    @Override
-    public void run() {
-
-        while(true) {
-            StreamsDatum item;
-            try {
-                item = inQueue.poll();
-                if(item.getDocument() instanceof String && item.equals(TERMINATE)) {
-                    LOGGER.info("Terminating!");
-                    break;
-                }
-
-                Thread.sleep(new Random().nextInt(100));
-
-                for( StreamsDatum entry : process(item)) {
-                    outQueue.offer(entry);
-                }
-
-
-            } catch (Exception e) {
-                e.printStackTrace();
-
-            }
-        }
-    }
-
-    public StreamsDatum createStreamsDatum(User user) {
-        return new StreamsDatum(user, user.getIdStr());
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-
-        List<StreamsDatum> result = new ArrayList<>();
-        String item;
-        try {
-            // first check for valid json
-            // since data is coming from outside provider, we don't know what type the events are
-            if( entry.getDocument() instanceof String) {
-                item = (String) entry.getDocument();
-            } else {
-                item = mapper.writeValueAsString(entry.getDocument());
-            }
-
-            Class inClass = TwitterEventClassifier.detectClass(item);
-
-            User user;
-
-            if ( inClass.equals( Tweet.class )) {
-                LOGGER.debug("TWEET");
-                Tweet tweet = mapper.readValue(item, Tweet.class);
-                user = tweet.getUser();
-                result.add(createStreamsDatum(user));
-            }
-            else if ( inClass.equals( Retweet.class )) {
-                LOGGER.debug("RETWEET");
-                Retweet retweet = mapper.readValue(item, Retweet.class);
-                user = retweet.getRetweetedStatus().getUser();
-                result.add(createStreamsDatum(user));
-            } else if ( inClass.equals( User.class )) {
-                LOGGER.debug("USER");
-                user = mapper.readValue(item, User.class);
-                result.add(createStreamsDatum(user));
-            } else {
-                return new ArrayList<>();
-            }
-
-            return result;
-        } catch (Exception e) {
-            e.printStackTrace();
-            LOGGER.warn("Error processing " + entry.toString());
-            return new ArrayList<>();
-        }
-    }
-
-    @Override
-    public void prepare(Object o) {
-
-    }
-
-    @Override
-    public void cleanUp() {
-
-    }
-};

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterTypeConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterTypeConverter.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterTypeConverter.java
index cc1ecb1..d51e4e7 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterTypeConverter.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterTypeConverter.java
@@ -21,13 +21,14 @@ package org.apache.streams.twitter.processor;
 import org.apache.streams.converter.ActivityConverterProcessor;
 
 /**
- * This class performs conversion of a twitter event to a specified outClass
+ * This class performs conversion of a twitter event to a specified outClass.
  *
+ * <p/>
  * Deprecated: use TypeConverterProcessor and ActivityConverterProcessor instead
  */
 @Deprecated
 public class TwitterTypeConverter extends ActivityConverterProcessor {
 
-    public final static String STREAMS_ID = "TwitterTypeConverter";
+  public static final String STREAMS_ID = "TwitterTypeConverter";
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterUrlApiProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterUrlApiProcessor.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterUrlApiProcessor.java
index 30db471..0dd43bb 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterUrlApiProcessor.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/processor/TwitterUrlApiProcessor.java
@@ -18,63 +18,71 @@
 
 package org.apache.streams.twitter.processor;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import org.apache.streams.components.http.HttpProcessorConfiguration;
 import org.apache.streams.components.http.processor.SimpleHTTPGetProcessor;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.pojo.json.Activity;
 
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 /**
- * Class gets a global share count from Twitter API for links on Activity datums
+ * Class gets a global share count from Twitter API for links on Activity datums.
  */
 public class TwitterUrlApiProcessor extends SimpleHTTPGetProcessor implements StreamsProcessor {
 
-    private final static String STREAMS_ID = "TwitterUrlApiProcessor";
+  private static final String STREAMS_ID = "TwitterUrlApiProcessor";
 
-    public TwitterUrlApiProcessor() {
-        super();
-        this.configuration.setHostname("urls.api.twitter.com");
-        this.configuration.setResourcePath("/1/urls/count.json");
-        this.configuration.setEntity(HttpProcessorConfiguration.Entity.ACTIVITY);
-        this.configuration.setExtension("twitter_url_count");
-    }
+  /**
+   * TwitterUrlApiProcessor constructor.
+   */
+  public TwitterUrlApiProcessor() {
+    super();
+    this.configuration.setHostname("urls.api.twitter.com");
+    this.configuration.setResourcePath("/1/urls/count.json");
+    this.configuration.setEntity(HttpProcessorConfiguration.Entity.ACTIVITY);
+    this.configuration.setExtension("twitter_url_count");
+  }
 
-    public TwitterUrlApiProcessor(HttpProcessorConfiguration processorConfiguration) {
-        super(processorConfiguration);
-        this.configuration.setHostname("urls.api.twitter.com");
-        this.configuration.setResourcePath("/1/urls/count.json");
-        this.configuration.setEntity(HttpProcessorConfiguration.Entity.ACTIVITY);
-        this.configuration.setExtension("twitter_url_count");
-    }
+  /**
+   * TwitterUrlApiProcessor constructor.
+   */
+  public TwitterUrlApiProcessor(HttpProcessorConfiguration processorConfiguration) {
+    super(processorConfiguration);
+    this.configuration.setHostname("urls.api.twitter.com");
+    this.configuration.setResourcePath("/1/urls/count.json");
+    this.configuration.setEntity(HttpProcessorConfiguration.Entity.ACTIVITY);
+    this.configuration.setExtension("twitter_url_count");
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        Preconditions.checkArgument(entry.getDocument() instanceof Activity);
-        Activity activity = mapper.convertValue(entry.getDocument(), Activity.class);
-        if( activity.getLinks() != null && activity.getLinks().size() > 0)
-            return super.process(entry);
-        else
-            return Lists.newArrayList(entry);
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    Preconditions.checkArgument(entry.getDocument() instanceof Activity);
+    Activity activity = mapper.convertValue(entry.getDocument(), Activity.class);
+    if ( activity.getLinks() != null && activity.getLinks().size() > 0) {
+      return super.process(entry);
+    } else {
+      return Lists.newArrayList(entry);
     }
+  }
 
-    @Override
-    protected Map<String, String> prepareParams(StreamsDatum entry) {
+  @Override
+  protected Map<String, String> prepareParams(StreamsDatum entry) {
 
-        Map<String, String> params = new HashMap<>();
+    Map<String, String> params = new HashMap<>();
 
-        params.put("url", mapper.convertValue(entry.getDocument(), Activity.class).getLinks().get(0));
+    params.put("url", mapper.convertValue(entry.getDocument(), Activity.class).getLinks().get(0));
 
-        return params;
-    }
+    return params;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterErrorHandler.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterErrorHandler.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterErrorHandler.java
index 90f6b62..ec43fba 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterErrorHandler.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterErrorHandler.java
@@ -21,120 +21,113 @@ package org.apache.streams.twitter.provider;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.twitter.TwitterConfiguration;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import twitter4j.RateLimitStatus;
 import twitter4j.Twitter;
 import twitter4j.TwitterException;
-import twitter4j.RateLimitStatus;
 
 /**
  *  Handle expected and unexpected exceptions.
  */
-public class TwitterErrorHandler
-{
-    private final static Logger LOGGER = LoggerFactory.getLogger(TwitterErrorHandler.class);
-
-    // selected because 3 * 5 + n >= 15 for positive n
-    protected static long retry =
-            new ComponentConfigurator<TwitterConfiguration>(TwitterConfiguration.class).detectConfiguration(
-                    StreamsConfigurator.getConfig().getConfig("twitter")
-            ).getRetrySleepMs();
-    protected static long retryMax =
-            new ComponentConfigurator<TwitterConfiguration>(TwitterConfiguration.class).detectConfiguration(
-                    StreamsConfigurator.getConfig().getConfig("twitter")
-            ).getRetryMax();
-
-    @Deprecated
-    public static int handleTwitterError(Twitter twitter, Exception exception) {
-        return handleTwitterError( twitter, null, exception);
-    }
+public class TwitterErrorHandler {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterErrorHandler.class);
+
+  // selected because 3 * 5 + n >= 15 for positive n
+  protected static long retry =
+      new ComponentConfigurator<TwitterConfiguration>(TwitterConfiguration.class).detectConfiguration(
+          StreamsConfigurator.getConfig().getConfig("twitter")
+      ).getRetrySleepMs();
+  protected static long retryMax =
+      new ComponentConfigurator<TwitterConfiguration>(TwitterConfiguration.class).detectConfiguration(
+          StreamsConfigurator.getConfig().getConfig("twitter")
+      ).getRetryMax();
+
+  @Deprecated
+  public static int handleTwitterError(Twitter twitter, Exception exception) {
+    return handleTwitterError( twitter, null, exception);
+  }
+
+  /**
+   * handleTwitterError.
+   * @param twitter Twitter
+   * @param id id
+   * @param exception exception
+   * @return
+   */
+  public static int handleTwitterError(Twitter twitter, Long id, Exception exception) {
+
+    if (exception instanceof TwitterException) {
+      TwitterException twitterException = (TwitterException)exception;
+
+      if (twitterException.exceededRateLimitation()) {
+
+        long millisUntilReset = retry;
+
+        final RateLimitStatus rateLimitStatus = twitterException.getRateLimitStatus();
+        if (rateLimitStatus != null) {
+          millisUntilReset = rateLimitStatus.getSecondsUntilReset() * 1000;
+        }
+
+        LOGGER.warn("Rate Limit Exceeded. Will retry in {} seconds...", millisUntilReset / 1000);
+
+        try {
+          Thread.sleep(millisUntilReset);
+        } catch (InterruptedException e1) {
+          Thread.currentThread().interrupt();
+        }
 
-    public static int handleTwitterError(Twitter twitter, Long id, Exception exception)
-    {
-        if(exception instanceof TwitterException)
-        {
-            TwitterException e = (TwitterException)exception;
-            if(e.exceededRateLimitation())
-            {
-                long millisUntilReset = retry;
-
-                final RateLimitStatus rateLimitStatus = e.getRateLimitStatus();
-                if (rateLimitStatus != null) {
-                    millisUntilReset = rateLimitStatus.getSecondsUntilReset() * 1000;
-                }
-
-                LOGGER.warn("Rate Limit Exceeded. Will retry in {} seconds...", millisUntilReset / 1000);
-
-                try {
-                    Thread.sleep(millisUntilReset);
-                } catch (InterruptedException e1) {
-                    Thread.currentThread().interrupt();
-                }
-
-                return 1;
-            }
-            else if(e.isCausedByNetworkIssue())
-            {
-                LOGGER.info("Twitter Network Issues Detected. Backing off...");
-                LOGGER.info("{} - {}", e.getExceptionCode(), e.getLocalizedMessage());
-                try {
-                    Thread.sleep(retry);
-                } catch (InterruptedException e1) {
-                    Thread.currentThread().interrupt();
-                }
-                return 1;
-            }
-            else if(e.isErrorMessageAvailable())
-            {
-                if(e.getMessage().toLowerCase().contains("does not exist"))
-                {
-                    if( id != null )
-                        LOGGER.warn("User does not exist: {}", id);
-                    else
-                        LOGGER.warn("User does not exist");
-                    return (int)retryMax;
-                }
-                else
-                {
-                    return (int)retryMax/3;
-                }
-            }
-            else
-            {
-                if(e.getExceptionCode().equals("ced778ef-0c669ac0"))
-                {
-                    // This is a known weird issue, not exactly sure the cause, but you'll never be able to get the data.
-                    return (int)retryMax/3;
-                }
-                else if(e.getExceptionCode().equals("4be80492-0a7bf7c7")) {
-                    // This is a 401 reflecting credentials don't have access to the requested resource.
-                    if( id != null )
-                        LOGGER.warn("Authentication Exception accessing id: {}", id);
-                    else
-                        LOGGER.warn("Authentication Exception");
-                    return (int)retryMax;
-                }
-                else
-                {
-                    LOGGER.warn("Unknown Twitter Exception...");
-                    LOGGER.warn("  Account: {}", twitter);
-                    LOGGER.warn("   Access: {}", e.getAccessLevel());
-                    LOGGER.warn("     Code: {}", e.getExceptionCode());
-                    LOGGER.warn("  Message: {}", e.getLocalizedMessage());
-                    return (int)retryMax/10;
-                }
-            }
+        return 1;
+      } else if (twitterException.isCausedByNetworkIssue()) {
+        LOGGER.info("Twitter Network Issues Detected. Backing off...");
+        LOGGER.info("{} - {}", twitterException.getExceptionCode(), twitterException.getLocalizedMessage());
+        try {
+          Thread.sleep(retry);
+        } catch (InterruptedException e1) {
+          Thread.currentThread().interrupt();
         }
-        else if(exception instanceof RuntimeException)
-        {
-            LOGGER.warn("TwitterGrabber: Unknown Runtime Error", exception.getMessage());
-            return (int)retryMax/3;
+        return 1;
+      } else if (twitterException.isErrorMessageAvailable()) {
+        if (twitterException.getMessage().toLowerCase().contains("does not exist")) {
+          if ( id != null ) {
+            LOGGER.warn("User does not exist: {}", id);
+          } else {
+            LOGGER.warn("User does not exist");
+          }
+          return (int)retryMax;
+        } else {
+          return (int)retryMax / 3;
         }
-        else
-        {
-            LOGGER.info("Completely Unknown Exception: {}", exception);
-            return (int)retryMax/3;
+      } else {
+        if (twitterException.getExceptionCode().equals("ced778ef-0c669ac0")) {
+          // This is a known weird issue, not exactly sure the cause, but you'll never be able to get the data.
+          return (int)retryMax / 3;
+        } else if (twitterException.getExceptionCode().equals("4be80492-0a7bf7c7")) {
+          // This is a 401 reflecting credentials don't have access to the requested resource.
+          if ( id != null ) {
+            LOGGER.warn("Authentication Exception accessing id: {}", id);
+          } else {
+            LOGGER.warn("Authentication Exception");
+          }
+          return (int)retryMax;
+        } else {
+          LOGGER.warn("Unknown Twitter Exception...");
+          LOGGER.warn("  Account: {}", twitter);
+          LOGGER.warn("   Access: {}", twitterException.getAccessLevel());
+          LOGGER.warn("     Code: {}", twitterException.getExceptionCode());
+          LOGGER.warn("  Message: {}", twitterException.getLocalizedMessage());
+          return (int)retryMax / 10;
         }
+      }
+    } else if (exception instanceof RuntimeException) {
+      LOGGER.warn("TwitterGrabber: Unknown Runtime Error", exception.getMessage());
+      return (int)retryMax / 3;
+    } else {
+      LOGGER.info("Completely Unknown Exception: {}", exception);
+      return (int)retryMax / 3;
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterEventClassifier.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterEventClassifier.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterEventClassifier.java
deleted file mode 100644
index 9466c2e..0000000
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterEventClassifier.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.twitter.provider;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.StringUtils;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.twitter.converter.TwitterDateTimeFormat;
-import org.apache.streams.twitter.pojo.Delete;
-import org.apache.streams.twitter.pojo.FriendList;
-import org.apache.streams.twitter.pojo.Retweet;
-import org.apache.streams.twitter.pojo.Tweet;
-import org.apache.streams.twitter.pojo.User;
-import org.apache.streams.twitter.pojo.UserstreamEvent;
-
-import java.io.IOException;
-import java.io.Serializable;
-
-/**
- * TwitterEventClassifier classifies twitter events
- *
- * @Deprecated - replaced by TwitterDocumentClassifier - use ActivityConverterProcessor
- */
-public class TwitterEventClassifier implements Serializable {
-
-    private static ObjectMapper mapper = new StreamsJacksonMapper(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
-
-    public static Class detectClass( String json ) {
-        Preconditions.checkNotNull(json);
-        Preconditions.checkArgument(StringUtils.isNotEmpty(json));
-
-        ObjectNode objectNode;
-        try {
-            objectNode = (ObjectNode) mapper.readTree(json);
-        } catch (IOException e) {
-            e.printStackTrace();
-            return null;
-        }
-
-        if( objectNode.findValue("retweeted_status") != null && objectNode.get("retweeted_status") != null)
-            return Retweet.class;
-        else if( objectNode.findValue("delete") != null )
-            return Delete.class;
-        else if( objectNode.findValue("friends") != null ||
-                objectNode.findValue("friends_str") != null )
-            return FriendList.class;
-        else if( objectNode.findValue("target_object") != null )
-            return UserstreamEvent.class;
-        else if ( objectNode.findValue("location") != null && objectNode.findValue("user") == null)
-            return User.class;
-        else
-            return Tweet.class;
-    }
-
-}


[08/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/LocalStreamBuilder.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/LocalStreamBuilder.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/LocalStreamBuilder.java
index f6fcaac..9e5089e 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/LocalStreamBuilder.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/LocalStreamBuilder.java
@@ -18,25 +18,38 @@
 
 package org.apache.streams.local.builders;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.util.concurrent.Uninterruptibles;
-import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.*;
+import org.apache.streams.core.DatumStatusCountable;
+import org.apache.streams.core.StreamBuilder;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsPersistWriter;
+import org.apache.streams.core.StreamsProcessor;
+import org.apache.streams.core.StreamsProvider;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.local.LocalRuntimeConfiguration;
 import org.apache.streams.local.counters.StreamsTaskCounter;
 import org.apache.streams.local.executors.ShutdownStreamOnUnhandleThrowableThreadPoolExecutor;
 import org.apache.streams.local.monitoring.MonitoringConfiguration;
 import org.apache.streams.local.queues.ThroughputQueue;
-import org.apache.streams.local.tasks.*;
+import org.apache.streams.local.tasks.BaseStreamsTask;
+import org.apache.streams.local.tasks.LocalStreamProcessMonitorThread;
+import org.apache.streams.local.tasks.StatusCounterMonitorThread;
+import org.apache.streams.local.tasks.StreamsProviderTask;
+import org.apache.streams.local.tasks.StreamsTask;
 import org.apache.streams.monitoring.tasks.BroadcastMonitorThread;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.util.concurrent.Uninterruptibles;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 
 import java.math.BigInteger;
-import java.util.*;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
@@ -49,452 +62,452 @@ import java.util.concurrent.TimeUnit;
  */
 public class LocalStreamBuilder implements StreamBuilder {
 
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(LocalStreamBuilder.class);
-    private static final int DEFAULT_QUEUE_SIZE = 500;
-
-    public static final String TIMEOUT_KEY = "TIMEOUT";
-    public static final String BROADCAST_KEY = "broadcastURI";
-    public static final String STREAM_IDENTIFIER_KEY = "streamsID";
-    public static final String BROADCAST_INTERVAL_KEY = "monitoring_broadcast_interval_ms";
-    public static final String DEFAULT_STREAM_IDENTIFIER = "Unknown_Stream";
-    public static final String DEFAULT_STARTED_AT_KEY = "startedAt";
-
-    private Map<String, StreamComponent> providers;
-    private Map<String, StreamComponent> components;
-    private LocalRuntimeConfiguration streamConfig;
-    private Map<StreamsTask, Future> futures;
-    private ExecutorService executor;
-    private ExecutorService monitor;
-    private int totalTasks;
-    private int monitorTasks;
-    private LocalStreamProcessMonitorThread monitorThread;
-    private Map<String, List<StreamsTask>> tasks;
-    private Thread shutdownHook;
-    private BroadcastMonitorThread broadcastMonitor;
-    private int maxQueueCapacity;
-    private String streamIdentifier = DEFAULT_STREAM_IDENTIFIER;
-    private DateTime startedAt = new DateTime();
-    private boolean useDeprecatedMonitors;
-
-    /**
-     * Creates a local stream builder with all configuration resolved by typesafe
-     */
-    public LocalStreamBuilder() {
-        this(new ObjectMapper().convertValue(StreamsConfigurator.detectConfiguration(), LocalRuntimeConfiguration.class));
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(LocalStreamBuilder.class);
+  private static final int DEFAULT_QUEUE_SIZE = 500;
+
+  public static final String TIMEOUT_KEY = "TIMEOUT";
+  public static final String BROADCAST_KEY = "broadcastURI";
+  public static final String STREAM_IDENTIFIER_KEY = "streamsID";
+  public static final String BROADCAST_INTERVAL_KEY = "monitoring_broadcast_interval_ms";
+  public static final String DEFAULT_STREAM_IDENTIFIER = "Unknown_Stream";
+  public static final String DEFAULT_STARTED_AT_KEY = "startedAt";
+
+  private Map<String, StreamComponent> providers;
+  private Map<String, StreamComponent> components;
+  private LocalRuntimeConfiguration streamConfig;
+  private Map<StreamsTask, Future> futures;
+  private ExecutorService executor;
+  private ExecutorService monitor;
+  private int totalTasks;
+  private int monitorTasks;
+  private LocalStreamProcessMonitorThread monitorThread;
+  private Map<String, List<StreamsTask>> tasks;
+  private Thread shutdownHook;
+  private BroadcastMonitorThread broadcastMonitor;
+  private int maxQueueCapacity;
+  private String streamIdentifier = DEFAULT_STREAM_IDENTIFIER;
+  private DateTime startedAt = new DateTime();
+  private boolean useDeprecatedMonitors;
+
+  /**
+   * Creates a local stream builder with all configuration resolved by Typesafe Config.
+   */
+  public LocalStreamBuilder() {
+    this(new ObjectMapper().convertValue(StreamsConfigurator.detectConfiguration(), LocalRuntimeConfiguration.class));
+  }
+
+  /**
+   * Creates a local stream builder with a config object and default maximum internal queue size of 500
+   * @param streamConfig
+   * @deprecated use LocalRuntimeConfiguration constructor instead
+   */
+  @Deprecated
+  public LocalStreamBuilder(Map<String, Object> streamConfig) {
+    this(DEFAULT_QUEUE_SIZE, streamConfig);
+  }
+
+  /**
+   * Creates a local stream builder with no config object. If maxQueueCapacity is less than 1 the queue is
+   * unbounded.
+   * @param maxQueueCapacity
+   *
+   * @deprecated use LocalRuntimeConfiguration constructor instead
+   */
+  @Deprecated
+  public LocalStreamBuilder(int maxQueueCapacity) {
+    this(maxQueueCapacity, null);
+  }
+
+  /**
+   * Creates a local stream builder with a config object. If maxQueueCapacity is less than 1 the queue is
+   * unbounded.
+   *
+   * @param maxQueueCapacity
+   * @param streamConfig
+   *
+   * @deprecated use LocalRuntimeConfiguration constructor instead
+   */
+  @Deprecated
+  public LocalStreamBuilder(int maxQueueCapacity, Map<String, Object> streamConfig) {
+    this(new LocalRuntimeConfiguration());
+    this.streamConfig.setQueueSize(new Long(maxQueueCapacity));
+    if( streamConfig != null && streamConfig.get(LocalStreamBuilder.TIMEOUT_KEY) != null )
+      this.streamConfig.setProviderTimeoutMs(new Long((Integer) (streamConfig.get(LocalStreamBuilder.TIMEOUT_KEY))));
+    if( streamConfig != null && streamConfig.get(LocalStreamBuilder.STREAM_IDENTIFIER_KEY) != null )
+      this.streamConfig.setIdentifier((String)streamConfig.get(LocalStreamBuilder.STREAM_IDENTIFIER_KEY));
+    if( streamConfig != null && streamConfig.get(LocalStreamBuilder.BROADCAST_KEY) != null ) {
+      MonitoringConfiguration monitoringConfiguration = new MonitoringConfiguration();
+      monitoringConfiguration.setBroadcastURI((String)streamConfig.get(LocalStreamBuilder.BROADCAST_KEY));
+      if(streamConfig.get(LocalStreamBuilder.BROADCAST_INTERVAL_KEY) != null)
+        monitoringConfiguration.setMonitoringBroadcastIntervalMs(Long.parseLong((String)streamConfig.get(LocalStreamBuilder.BROADCAST_INTERVAL_KEY)));
+      this.streamConfig.setMonitoring(monitoringConfiguration);
     }
-
-    /**
-     * Creates a local stream builder with a config object and default maximum internal queue size of 500
-     * @param streamConfig
-     * @deprecated use LocalRuntimeConfiguration constructor instead
-     */
-    @Deprecated
-    public LocalStreamBuilder(Map<String, Object> streamConfig) {
-        this(DEFAULT_QUEUE_SIZE, streamConfig);
-    }
-
-    /**
-     * Creates a local stream builder with no config object. If maxQueueCapacity is less than 1 the queue is
-     * unbounded.
-     * @param maxQueueCapacity
-     *
-     * @deprecated use LocalRuntimeConfiguration constructor instead
-     */
-    @Deprecated
-    public LocalStreamBuilder(int maxQueueCapacity) {
-        this(maxQueueCapacity, null);
-    }
-
-    /**
-     * Creates a local stream builder with a config object. If maxQueueCapacity is less than 1 the queue is
-     * unbounded.
-     *
-     * @param maxQueueCapacity
-     * @param streamConfig
-     *
-     * @deprecated use LocalRuntimeConfiguration constructor instead
-     */
-    @Deprecated
-    public LocalStreamBuilder(int maxQueueCapacity, Map<String, Object> streamConfig) {
-        this(new LocalRuntimeConfiguration());
-        this.streamConfig.setQueueSize(new Long(maxQueueCapacity));
-        if( streamConfig != null && streamConfig.get(LocalStreamBuilder.TIMEOUT_KEY) != null )
-            this.streamConfig.setProviderTimeoutMs(new Long((Integer) (streamConfig.get(LocalStreamBuilder.TIMEOUT_KEY))));
-        if( streamConfig != null && streamConfig.get(LocalStreamBuilder.STREAM_IDENTIFIER_KEY) != null )
-            this.streamConfig.setIdentifier((String)streamConfig.get(LocalStreamBuilder.STREAM_IDENTIFIER_KEY));
-        if( streamConfig != null && streamConfig.get(LocalStreamBuilder.BROADCAST_KEY) != null ) {
-            MonitoringConfiguration monitoringConfiguration = new MonitoringConfiguration();
-            monitoringConfiguration.setBroadcastURI((String)streamConfig.get(LocalStreamBuilder.BROADCAST_KEY));
-            if(streamConfig.get(LocalStreamBuilder.BROADCAST_INTERVAL_KEY) != null)
-                monitoringConfiguration.setMonitoringBroadcastIntervalMs(Long.parseLong((String)streamConfig.get(LocalStreamBuilder.BROADCAST_INTERVAL_KEY)));
-            this.streamConfig.setMonitoring(monitoringConfiguration);
+  }
+
+  public LocalStreamBuilder(LocalRuntimeConfiguration streamConfig) {
+    this.streamConfig = streamConfig;
+    this.providers = new HashMap<String, StreamComponent>();
+    this.components = new HashMap<String, StreamComponent>();
+    this.totalTasks = 0;
+    this.monitorTasks = 0;
+    this.futures = new HashMap<>();
+  }
+
+  public void prepare() {
+    this.streamIdentifier = streamConfig.getIdentifier();
+    this.streamConfig.setStartedAt(startedAt.getMillis());
+    final LocalStreamBuilder self = this;
+    this.shutdownHook = new Thread() {
+      @Override
+      public void run() {
+        LOGGER.debug("Shutdown hook received.  Beginning shutdown");
+        self.stopInternal(true);
+      }
+    };
+    this.useDeprecatedMonitors = false;
+    this.broadcastMonitor = new BroadcastMonitorThread(this.streamConfig.getMonitoring());
+  }
+
+  public void setUseDeprecatedMonitors(boolean useDeprecatedMonitors) {
+    this.useDeprecatedMonitors = useDeprecatedMonitors;
+  }
+
+  @Override
+  public StreamBuilder newPerpetualStream(String id, StreamsProvider provider) {
+    validateId(id);
+    this.providers.put(id, new StreamComponent(id, provider, true, streamConfig));
+    ++this.totalTasks;
+    if(this.useDeprecatedMonitors && provider instanceof DatumStatusCountable )
+      ++this.monitorTasks;
+    return this;
+  }
+
+  @Override
+  public StreamBuilder newReadCurrentStream(String id, StreamsProvider provider) {
+    validateId(id);
+    this.providers.put(id, new StreamComponent(id, provider, false, streamConfig));
+    ++this.totalTasks;
+    if(this.useDeprecatedMonitors && provider instanceof DatumStatusCountable )
+      ++this.monitorTasks;
+    return this;
+  }
+
+  @Override
+  public StreamBuilder newReadNewStream(String id, StreamsProvider provider, BigInteger sequence) {
+    validateId(id);
+    this.providers.put(id, new StreamComponent(id, provider, sequence, streamConfig));
+    ++this.totalTasks;
+    if(this.useDeprecatedMonitors && provider instanceof DatumStatusCountable )
+      ++this.monitorTasks;
+    return this;
+  }
+
+  @Override
+  public StreamBuilder newReadRangeStream(String id, StreamsProvider provider, DateTime start, DateTime end) {
+    validateId(id);
+    this.providers.put(id, new StreamComponent(id, provider, start, end, streamConfig));
+    ++this.totalTasks;
+    if(this.useDeprecatedMonitors && provider instanceof DatumStatusCountable )
+      ++this.monitorTasks;
+    return this;
+  }
+
+  @Override
+  public StreamBuilder setStreamsConfiguration(StreamsConfiguration configuration) {
+    streamConfig = StreamsJacksonMapper.getInstance().convertValue(configuration, LocalRuntimeConfiguration.class);
+    return this;
+  }
+
+  @Override
+  public StreamsConfiguration getStreamsConfiguration() {
+    return StreamsJacksonMapper.getInstance().convertValue(streamConfig, StreamsConfiguration.class);
+  }
+
+  @Override
+  public StreamBuilder addStreamsProcessor(String id, StreamsProcessor processor, int numTasks, String... inBoundIds) {
+    validateId(id);
+    StreamComponent comp = new StreamComponent(id, processor, new ThroughputQueue<StreamsDatum>(this.maxQueueCapacity, id, streamIdentifier, startedAt.getMillis()), numTasks, streamConfig);
+    this.components.put(id, comp);
+    connectToOtherComponents(inBoundIds, comp);
+    this.totalTasks += numTasks;
+    if(this.useDeprecatedMonitors && processor instanceof DatumStatusCountable )
+      ++this.monitorTasks;
+    return this;
+  }
+
+  @Override
+  public StreamBuilder addStreamsPersistWriter(String id, StreamsPersistWriter writer, int numTasks, String... inBoundIds) {
+    validateId(id);
+    StreamComponent comp = new StreamComponent(id, writer, new ThroughputQueue<StreamsDatum>(this.maxQueueCapacity, id, streamIdentifier, startedAt.getMillis()), numTasks, streamConfig);
+    this.components.put(id, comp);
+    connectToOtherComponents(inBoundIds, comp);
+    this.totalTasks += numTasks;
+    if(this.useDeprecatedMonitors && writer instanceof DatumStatusCountable )
+      ++this.monitorTasks;
+    return this;
+  }
+
+  /**
+   * Runs the data stream in this JVM and blocks until completion.
+   */
+  @Override
+  public void start() {
+    prepare();
+    attachShutdownHandler();
+    boolean isRunning = true;
+    this.executor = new ShutdownStreamOnUnhandleThrowableThreadPoolExecutor(this.totalTasks, this);
+    this.monitor = Executors.newCachedThreadPool();
+    Map<String, StreamsProviderTask> provTasks = new HashMap<String, StreamsProviderTask>();
+    tasks = new HashMap<String, List<StreamsTask>>();
+    boolean forcedShutDown = false;
+
+    try {
+      if (this.useDeprecatedMonitors) {
+        monitorThread = new LocalStreamProcessMonitorThread(executor, 10);
+        this.monitor.submit(monitorThread);
+      }
+      setupComponentTasks(tasks);
+      setupProviderTasks(provTasks);
+      LOGGER.info("Started stream with {} components", tasks.size());
+      while(isRunning) {
+        Uninterruptibles.sleepUninterruptibly(streamConfig.getShutdownCheckDelay(), TimeUnit.MILLISECONDS);
+        isRunning = false;
+        for(StreamsProviderTask task : provTasks.values()) {
+          isRunning = isRunning || task.isRunning();
         }
-    }
-
-    public LocalStreamBuilder(LocalRuntimeConfiguration streamConfig) {
-        this.streamConfig = streamConfig;
-        this.providers = new HashMap<String, StreamComponent>();
-        this.components = new HashMap<String, StreamComponent>();
-        this.totalTasks = 0;
-        this.monitorTasks = 0;
-        this.futures = new HashMap<>();
-    }
-
-    public void prepare() {
-        this.streamIdentifier = streamConfig.getIdentifier();
-        this.streamConfig.setStartedAt(startedAt.getMillis());
-        final LocalStreamBuilder self = this;
-        this.shutdownHook = new Thread() {
-            @Override
-            public void run() {
-                LOGGER.debug("Shutdown hook received.  Beginning shutdown");
-                self.stopInternal(true);
+        for(StreamComponent task: components.values()) {
+          boolean tasksRunning = false;
+          for(StreamsTask t : task.getStreamsTasks()) {
+            if(t instanceof BaseStreamsTask) {
+              tasksRunning = tasksRunning || ((BaseStreamsTask) t).isRunning();
             }
-        };
-        this.useDeprecatedMonitors = false;
-        this.broadcastMonitor = new BroadcastMonitorThread(this.streamConfig.getMonitoring());
-    }
-
-    public void setUseDeprecatedMonitors(boolean useDeprecatedMonitors) {
-        this.useDeprecatedMonitors = useDeprecatedMonitors;
-    }
-
-    @Override
-    public StreamBuilder newPerpetualStream(String id, StreamsProvider provider) {
-        validateId(id);
-        this.providers.put(id, new StreamComponent(id, provider, true, streamConfig));
-        ++this.totalTasks;
-        if(this.useDeprecatedMonitors && provider instanceof DatumStatusCountable )
-            ++this.monitorTasks;
-        return this;
-    }
-
-    @Override
-    public StreamBuilder newReadCurrentStream(String id, StreamsProvider provider) {
-        validateId(id);
-        this.providers.put(id, new StreamComponent(id, provider, false, streamConfig));
-        ++this.totalTasks;
-        if(this.useDeprecatedMonitors && provider instanceof DatumStatusCountable )
-            ++this.monitorTasks;
-        return this;
-    }
-
-    @Override
-    public StreamBuilder newReadNewStream(String id, StreamsProvider provider, BigInteger sequence) {
-        validateId(id);
-        this.providers.put(id, new StreamComponent(id, provider, sequence, streamConfig));
-        ++this.totalTasks;
-        if(this.useDeprecatedMonitors && provider instanceof DatumStatusCountable )
-            ++this.monitorTasks;
-        return this;
-    }
-
-    @Override
-    public StreamBuilder newReadRangeStream(String id, StreamsProvider provider, DateTime start, DateTime end) {
-        validateId(id);
-        this.providers.put(id, new StreamComponent(id, provider, start, end, streamConfig));
-        ++this.totalTasks;
-        if(this.useDeprecatedMonitors && provider instanceof DatumStatusCountable )
-            ++this.monitorTasks;
-        return this;
-    }
-
-    @Override
-    public StreamBuilder setStreamsConfiguration(StreamsConfiguration configuration) {
-        streamConfig = StreamsJacksonMapper.getInstance().convertValue(configuration, LocalRuntimeConfiguration.class);
-        return this;
-    }
-
-    @Override
-    public StreamsConfiguration getStreamsConfiguration() {
-        return StreamsJacksonMapper.getInstance().convertValue(streamConfig, StreamsConfiguration.class);
+          }
+          isRunning = isRunning || (tasksRunning && task.getInBoundQueue().size() > 0);
+        }
+        if(isRunning) {
+          Uninterruptibles.sleepUninterruptibly(streamConfig.getShutdownCheckInterval(), TimeUnit.MILLISECONDS);
+        }
+      }
+      LOGGER.info("Components are no longer running or timed out");
+    } catch (Exception e){
+      LOGGER.warn("Runtime exception.  Beginning shutdown");
+      forcedShutDown = true;
+    } finally{
+      LOGGER.info("Stream has completed, pausing @ {}", System.currentTimeMillis());
+      Uninterruptibles.sleepUninterruptibly(streamConfig.getShutdownPauseMs(), TimeUnit.MILLISECONDS);
+      LOGGER.info("Stream has completed, shutting down @ {}", System.currentTimeMillis());
+      stopInternal(forcedShutDown);
     }
 
-    @Override
-    public StreamBuilder addStreamsProcessor(String id, StreamsProcessor processor, int numTasks, String... inBoundIds) {
-        validateId(id);
-        StreamComponent comp = new StreamComponent(id, processor, new ThroughputQueue<StreamsDatum>(this.maxQueueCapacity, id, streamIdentifier, startedAt.getMillis()), numTasks, streamConfig);
-        this.components.put(id, comp);
-        connectToOtherComponents(inBoundIds, comp);
-        this.totalTasks += numTasks;
-        if(this.useDeprecatedMonitors && processor instanceof DatumStatusCountable )
-            ++this.monitorTasks;
-        return this;
+  }
+
+  private void attachShutdownHandler() {
+    LOGGER.debug("Attaching shutdown handler");
+    Runtime.getRuntime().addShutdownHook(shutdownHook);
+  }
+
+  private void detachShutdownHandler() {
+    LOGGER.debug("Detaching shutdown handler");
+    Runtime.getRuntime().removeShutdownHook(shutdownHook);
+  }
+
+  protected void forceShutdown(Map<String, List<StreamsTask>> streamsTasks) {
+    LOGGER.debug("Shutdown failed.  Forcing shutdown");
+    for(List<StreamsTask> tasks : streamsTasks.values()) {
+      for(StreamsTask task : tasks) {
+        task.stopTask();
+        if(task.isWaiting()) {
+          this.futures.get(task).cancel(true);
+        }
+      }
     }
-
-    @Override
-    public StreamBuilder addStreamsPersistWriter(String id, StreamsPersistWriter writer, int numTasks, String... inBoundIds) {
-        validateId(id);
-        StreamComponent comp = new StreamComponent(id, writer, new ThroughputQueue<StreamsDatum>(this.maxQueueCapacity, id, streamIdentifier, startedAt.getMillis()), numTasks, streamConfig);
-        this.components.put(id, comp);
-        connectToOtherComponents(inBoundIds, comp);
-        this.totalTasks += numTasks;
-        if(this.useDeprecatedMonitors && writer instanceof DatumStatusCountable )
-            ++this.monitorTasks;
-        return this;
+    this.executor.shutdown();
+    this.monitor.shutdown();
+    try {
+      if(!this.executor.awaitTermination(streamConfig.getExecutorShutdownPauseMs(), TimeUnit.MILLISECONDS)){
+        this.executor.shutdownNow();
+      }
+      if(!this.monitor.awaitTermination(streamConfig.getMonitorShutdownPauseMs(), TimeUnit.MILLISECONDS)){
+        this.monitor.shutdownNow();
+      }
+    }catch (InterruptedException ie) {
+      this.executor.shutdownNow();
+      this.monitor.shutdownNow();
+      throw new RuntimeException(ie);
     }
+  }
 
-    /**
-     * Runs the data stream in the this JVM and blocks till completion.
-     */
-    @Override
-    public void start() {
-        prepare();
-        attachShutdownHandler();
-        boolean isRunning = true;
-        this.executor = new ShutdownStreamOnUnhandleThrowableThreadPoolExecutor(this.totalTasks, this);
-        this.monitor = Executors.newCachedThreadPool();
-        Map<String, StreamsProviderTask> provTasks = new HashMap<String, StreamsProviderTask>();
-        tasks = new HashMap<String, List<StreamsTask>>();
-        boolean forcedShutDown = false;
-
-        try {
-            if (this.useDeprecatedMonitors) {
-                monitorThread = new LocalStreamProcessMonitorThread(executor, 10);
-                this.monitor.submit(monitorThread);
-            }
-            setupComponentTasks(tasks);
-            setupProviderTasks(provTasks);
-            LOGGER.info("Started stream with {} components", tasks.size());
-            while(isRunning) {
-                Uninterruptibles.sleepUninterruptibly(streamConfig.getShutdownCheckDelay(), TimeUnit.MILLISECONDS);
-                isRunning = false;
-                for(StreamsProviderTask task : provTasks.values()) {
-                    isRunning = isRunning || task.isRunning();
-                }
-                for(StreamComponent task: components.values()) {
-                    boolean tasksRunning = false;
-                    for(StreamsTask t : task.getStreamsTasks()) {
-                        if(t instanceof BaseStreamsTask) {
-                            tasksRunning = tasksRunning || ((BaseStreamsTask) t).isRunning();
-                        }
-                    }
-                    isRunning = isRunning || (tasksRunning && task.getInBoundQueue().size() > 0);
-                }
-                if(isRunning) {
-                    Uninterruptibles.sleepUninterruptibly(streamConfig.getShutdownCheckInterval(), TimeUnit.MILLISECONDS);
-                }
-            }
-            LOGGER.info("Components are no longer running or timed out");
-        } catch (Exception e){
-            LOGGER.warn("Runtime exception.  Beginning shutdown");
-            forcedShutDown = true;
-        } finally{
-            LOGGER.info("Stream has completed, pausing @ {}", System.currentTimeMillis());
-            Uninterruptibles.sleepUninterruptibly(streamConfig.getShutdownPauseMs(), TimeUnit.MILLISECONDS);
-            LOGGER.info("Stream has completed, shutting down @ {}", System.currentTimeMillis());
-            stopInternal(forcedShutDown);
-        }
-
+  protected void shutdown(Map<String, List<StreamsTask>> streamsTasks) throws InterruptedException {
+    LOGGER.info("Attempting to shutdown tasks");
+    if (this.monitorThread != null) {
+      this.monitorThread.shutdown();
     }
-
-    private void attachShutdownHandler() {
-        LOGGER.debug("Attaching shutdown handler");
-        Runtime.getRuntime().addShutdownHook(shutdownHook);
+    this.executor.shutdown();
+    //complete stream shut down gracefully
+    for(StreamComponent prov : this.providers.values()) {
+      shutDownTask(prov, streamsTasks);
     }
-
-    private void detachShutdownHandler() {
-        LOGGER.debug("Detaching shutdown handler");
-        Runtime.getRuntime().removeShutdownHook(shutdownHook);
+    //need to make this configurable
+    if(!this.executor.awaitTermination(streamConfig.getExecutorShutdownWaitMs(), TimeUnit.MILLISECONDS)) { // all threads should have terminated already.
+      this.executor.shutdownNow();
+      this.executor.awaitTermination(streamConfig.getExecutorShutdownWaitMs(), TimeUnit.MILLISECONDS);
     }
-
-    protected void forceShutdown(Map<String, List<StreamsTask>> streamsTasks) {
-        LOGGER.debug("Shutdown failed.  Forcing shutdown");
-        for(List<StreamsTask> tasks : streamsTasks.values()) {
-            for(StreamsTask task : tasks) {
-                task.stopTask();
-                if(task.isWaiting()) {
-                    this.futures.get(task).cancel(true);
-                }
-            }
-        }
-        this.executor.shutdown();
-        this.monitor.shutdown();
-        try {
-            if(!this.executor.awaitTermination(streamConfig.getExecutorShutdownPauseMs(), TimeUnit.MILLISECONDS)){
-                this.executor.shutdownNow();
-            }
-            if(!this.monitor.awaitTermination(streamConfig.getMonitorShutdownPauseMs(), TimeUnit.MILLISECONDS)){
-                this.monitor.shutdownNow();
-            }
-        }catch (InterruptedException ie) {
-            this.executor.shutdownNow();
-            this.monitor.shutdownNow();
-            throw new RuntimeException(ie);
-        }
+    if(!this.monitor.awaitTermination(streamConfig.getMonitorShutdownWaitMs(), TimeUnit.MILLISECONDS)) { // all threads should have terminated already.
+      this.monitor.shutdownNow();
+      this.monitor.awaitTermination(streamConfig.getMonitorShutdownWaitMs(), TimeUnit.MILLISECONDS);
     }
-
-    protected void shutdown(Map<String, List<StreamsTask>> streamsTasks) throws InterruptedException {
-        LOGGER.info("Attempting to shutdown tasks");
-        if (this.monitorThread != null) {
-            this.monitorThread.shutdown();
-        }
-        this.executor.shutdown();
-        //complete stream shut down gracfully
-        for(StreamComponent prov : this.providers.values()) {
-            shutDownTask(prov, streamsTasks);
-        }
-        //need to make this configurable
-        if(!this.executor.awaitTermination(streamConfig.getExecutorShutdownWaitMs(), TimeUnit.MILLISECONDS)) { // all threads should have terminated already.
-            this.executor.shutdownNow();
-            this.executor.awaitTermination(streamConfig.getExecutorShutdownWaitMs(), TimeUnit.MILLISECONDS);
-        }
-        if(!this.monitor.awaitTermination(streamConfig.getMonitorShutdownWaitMs(), TimeUnit.MILLISECONDS)) { // all threads should have terminated already.
-            this.monitor.shutdownNow();
-            this.monitor.awaitTermination(streamConfig.getMonitorShutdownWaitMs(), TimeUnit.MILLISECONDS);
-        }
+  }
+
+  protected void setupProviderTasks(Map<String, StreamsProviderTask> provTasks) {
+    for(StreamComponent prov : this.providers.values()) {
+      StreamsTask task = prov.createConnectedTask(getTimeout());
+      task.setStreamConfig(this.streamConfig);
+      StreamsTaskCounter counter = new StreamsTaskCounter(prov.getId(), streamIdentifier, startedAt.getMillis());
+      task.setStreamsTaskCounter(counter);
+      this.executor.submit(task);
+      provTasks.put(prov.getId(), (StreamsProviderTask) task);
+      if(this.useDeprecatedMonitors && prov.isOperationCountable() ) {
+        this.monitor.submit(new StatusCounterMonitorThread((DatumStatusCountable) prov.getOperation(), 10));
+        this.monitor.submit(new StatusCounterMonitorThread((DatumStatusCountable) task, 10));
+      }
     }
-
-    protected void setupProviderTasks(Map<String, StreamsProviderTask> provTasks) {
-        for(StreamComponent prov : this.providers.values()) {
-            StreamsTask task = prov.createConnectedTask(getTimeout());
-            task.setStreamConfig(this.streamConfig);
-            StreamsTaskCounter counter = new StreamsTaskCounter(prov.getId(), streamIdentifier, startedAt.getMillis());
-            task.setStreamsTaskCounter(counter);
-            this.executor.submit(task);
-            provTasks.put(prov.getId(), (StreamsProviderTask) task);
-            if(this.useDeprecatedMonitors && prov.isOperationCountable() ) {
-                this.monitor.submit(new StatusCounterMonitorThread((DatumStatusCountable) prov.getOperation(), 10));
-                this.monitor.submit(new StatusCounterMonitorThread((DatumStatusCountable) task, 10));
-            }
+  }
+
+  protected void setupComponentTasks(Map<String, List<StreamsTask>> streamsTasks) {
+    for(StreamComponent comp : this.components.values()) {
+      int tasks = comp.getNumTasks();
+      List<StreamsTask> compTasks = new LinkedList<StreamsTask>();
+      StreamsTaskCounter counter = new StreamsTaskCounter(comp.getId(), streamIdentifier, startedAt.getMillis());
+      for(int i=0; i < tasks; ++i) {
+        StreamsTask task = comp.createConnectedTask(getTimeout());
+        task.setStreamsTaskCounter(counter);
+        task.setStreamConfig(this.streamConfig);
+        this.futures.put(task, this.executor.submit(task));
+        compTasks.add(task);
+        if(this.useDeprecatedMonitors &&  comp.isOperationCountable() ) {
+          this.monitor.submit(new StatusCounterMonitorThread((DatumStatusCountable) comp.getOperation(), 10));
+          this.monitor.submit(new StatusCounterMonitorThread((DatumStatusCountable) task, 10));
         }
+        this.monitor.submit(broadcastMonitor);
+      }
+      streamsTasks.put(comp.getId(), compTasks);
     }
-
-    protected void setupComponentTasks(Map<String, List<StreamsTask>> streamsTasks) {
-        for(StreamComponent comp : this.components.values()) {
-            int tasks = comp.getNumTasks();
-            List<StreamsTask> compTasks = new LinkedList<StreamsTask>();
-            StreamsTaskCounter counter = new StreamsTaskCounter(comp.getId(), streamIdentifier, startedAt.getMillis());
-            for(int i=0; i < tasks; ++i) {
-                StreamsTask task = comp.createConnectedTask(getTimeout());
-                task.setStreamsTaskCounter(counter);
-                task.setStreamConfig(this.streamConfig);
-                this.futures.put(task, this.executor.submit(task));
-                compTasks.add(task);
-                if(this.useDeprecatedMonitors &&  comp.isOperationCountable() ) {
-                    this.monitor.submit(new StatusCounterMonitorThread((DatumStatusCountable) comp.getOperation(), 10));
-                    this.monitor.submit(new StatusCounterMonitorThread((DatumStatusCountable) task, 10));
-                }
-                this.monitor.submit(broadcastMonitor);
-            }
-            streamsTasks.put(comp.getId(), compTasks);
+  }
+
+  /**
+   * Shuts down the running tasks in a pseudo depth-first-search kind of way. Checks that the upstream components have
+   * finished running before shutting down. Waits till inbound queue is empty to shutdown.
+   * @param comp StreamComponent to shut down.
+   * @param streamTasks the list of non-StreamsProvider tasks for this stream.
+   * @throws InterruptedException
+   */
+  private void shutDownTask(StreamComponent comp, Map<String, List<StreamsTask>> streamTasks) throws InterruptedException {
+    List<StreamsTask> tasks = streamTasks.get(comp.getId());
+    if(tasks != null) { //not a StreamProvider
+      boolean parentsShutDown = true;
+      for(StreamComponent parent : comp.getUpStreamComponents()) {
+        List<StreamsTask> parentTasks = streamTasks.get(parent.getId());
+        //if parentTask == null, its a provider and is not running anymore
+        if(parentTasks != null) {
+          for(StreamsTask task : parentTasks) {
+            parentsShutDown = parentsShutDown && !task.isRunning();
+          }
         }
-    }
-
-    /**
-     * Shutsdown the running tasks in sudo depth first search kind of way. Checks that the upstream components have
-     * finished running before shutting down. Waits till inbound queue is empty to shutdown.
-     * @param comp StreamComponent to shut down.
-     * @param streamTasks the list of non-StreamsProvider tasks for this stream.
-     * @throws InterruptedException
-     */
-    private void shutDownTask(StreamComponent comp, Map<String, List<StreamsTask>> streamTasks) throws InterruptedException {
-        List<StreamsTask> tasks = streamTasks.get(comp.getId());
-        if(tasks != null) { //not a StreamProvider
-            boolean parentsShutDown = true;
-            for(StreamComponent parent : comp.getUpStreamComponents()) {
-                List<StreamsTask> parentTasks = streamTasks.get(parent.getId());
-                //if parentTask == null, its a provider and is not running anymore
-                if(parentTasks != null) {
-                    for(StreamsTask task : parentTasks) {
-                        parentsShutDown = parentsShutDown && !task.isRunning();
-                    }
-                }
-            }
-            if(parentsShutDown) {
-                for(StreamsTask task : tasks) {
-                    task.stopTask();
-                    if(task.isWaiting()) {
-                        this.futures.get(task).cancel(true); // no data to process, interrupt block queue
-                    }
-                }
-                for(StreamsTask task : tasks) {
-                    int count = 0;
-                    while(count < streamConfig.getTaskTimeoutMs() / 1000 && task.isRunning()) {
-                        Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
-                        count++;
-                    }
-
-                    if(task.isRunning()) {
-                        LOGGER.warn("Task {} failed to terminate in allotted timeframe", task.toString());
-                    }
-                }
-            }
+      }
+      if(parentsShutDown) {
+        for(StreamsTask task : tasks) {
+          task.stopTask();
+          if(task.isWaiting()) {
+            this.futures.get(task).cancel(true); // no data to process, interrupt block queue
+          }
         }
-        Collection<StreamComponent> children = comp.getDownStreamComponents();
-        if(children != null) {
-            for(StreamComponent child : comp.getDownStreamComponents()) {
-                shutDownTask(child, streamTasks);
-            }
+        for(StreamsTask task : tasks) {
+          int count = 0;
+          while(count < streamConfig.getTaskTimeoutMs() / 1000 && task.isRunning()) {
+            Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
+            count++;
+          }
+
+          if(task.isRunning()) {
+            LOGGER.warn("Task {} failed to terminate in allotted timeframe", task.toString());
+          }
         }
+      }
     }
-
-    /**
-     * NOT IMPLEMENTED.
-     */
-    @Override
-    public void stop() {
-        stopInternal(false);
-    }
-
-
-
-    protected void stopInternal(boolean systemExiting) {
-        try {
-            shutdown(tasks);
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to shutdown Stream: {}", e);
-            forceShutdown(tasks);
-        } finally {
-            try {
-                 if(!systemExiting) {
-                      detachShutdownHandler();
-                  }
-               } catch( Throwable e3 ) {
-                 LOGGER.error("StopInternal caught Throwable: {}", e3);
-                       System.exit(1);
-            }
-        }
+    Collection<StreamComponent> children = comp.getDownStreamComponents();
+    if(children != null) {
+      for(StreamComponent child : comp.getDownStreamComponents()) {
+        shutDownTask(child, streamTasks);
+      }
     }
-
-    private void connectToOtherComponents(String[] conntectToIds, StreamComponent toBeConnected) {
-        for(String id : conntectToIds) {
-            StreamComponent upStream = null;
-            if(this.providers.containsKey(id)) {
-                upStream = this.providers.get(id);
-            }
-            else if(this.components.containsKey(id)) {
-                upStream = this.components.get(id);
-            }
-            else {
-                throw new InvalidStreamException("Cannot connect to id, "+id+", because id does not exist.");
-            }
-            upStream.addOutBoundQueue(toBeConnected, toBeConnected.getInBoundQueue());
-            toBeConnected.addInboundQueue(upStream);
+  }
+
+  /**
+   * NOT IMPLEMENTED.
+   */
+  @Override
+  public void stop() {
+    stopInternal(false);
+  }
+
+
+
+  protected void stopInternal(boolean systemExiting) {
+    try {
+      shutdown(tasks);
+    } catch (Exception e) {
+      LOGGER.error("Exception while trying to shutdown Stream: {}", e);
+      forceShutdown(tasks);
+    } finally {
+      try {
+        if(!systemExiting) {
+          detachShutdownHandler();
         }
+      } catch( Throwable e3 ) {
+        LOGGER.error("StopInternal caught Throwable: {}", e3);
+        System.exit(1);
+      }
     }
-
-    private void validateId(String id) {
-        if(this.providers.containsKey(id) || this.components.containsKey(id)) {
-            throw new InvalidStreamException("Duplicate id. "+id+" is already assigned to another component");
-        } else if(id.contains(":")) {
-            throw new InvalidStreamException("Invalid character, ':', in component id : "+id);
-        }
+  }
+
+  private void connectToOtherComponents(String[] conntectToIds, StreamComponent toBeConnected) {
+    for(String id : conntectToIds) {
+      StreamComponent upStream = null;
+      if(this.providers.containsKey(id)) {
+        upStream = this.providers.get(id);
+      }
+      else if(this.components.containsKey(id)) {
+        upStream = this.components.get(id);
+      }
+      else {
+        throw new InvalidStreamException("Cannot connect to id, "+id+", because id does not exist.");
+      }
+      upStream.addOutBoundQueue(toBeConnected, toBeConnected.getInBoundQueue());
+      toBeConnected.addInboundQueue(upStream);
     }
+  }
 
-    protected int getTimeout() {
-        //Set the timeout of it is configured, otherwise signal downstream components to use their default
-        return streamConfig.getProviderTimeoutMs().intValue();
+  private void validateId(String id) {
+    if(this.providers.containsKey(id) || this.components.containsKey(id)) {
+      throw new InvalidStreamException("Duplicate id. "+id+" is already assigned to another component");
+    } else if(id.contains(":")) {
+      throw new InvalidStreamException("Invalid character, ':', in component id : "+id);
     }
-
-    private LocalRuntimeConfiguration convertConfiguration(Map<String, Object> streamConfig) {
-        LocalRuntimeConfiguration config = new LocalRuntimeConfiguration();
-        if( streamConfig != null ) {
-            for( Map.Entry<String, Object> item : streamConfig.entrySet() ) {
-                config.setAdditionalProperty(item.getKey(), item.getValue());
-            }
-        }
-        return config;
+  }
+
+  protected int getTimeout() {
+    //Set the timeout if it is configured, otherwise signal downstream components to use their default
+    return streamConfig.getProviderTimeoutMs().intValue();
+  }
+
+  private LocalRuntimeConfiguration convertConfiguration(Map<String, Object> streamConfig) {
+    LocalRuntimeConfiguration config = new LocalRuntimeConfiguration();
+    if( streamConfig != null ) {
+      for( Map.Entry<String, Object> item : streamConfig.entrySet() ) {
+        config.setAdditionalProperty(item.getKey(), item.getValue());
+      }
     }
+    return config;
+  }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/StreamComponent.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/StreamComponent.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/StreamComponent.java
index 9d602cd..75799b7 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/StreamComponent.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/builders/StreamComponent.java
@@ -18,19 +18,30 @@
 
 package org.apache.streams.local.builders;
 
-import com.google.common.collect.Lists;
 import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.core.*;
+import org.apache.streams.core.DatumStatusCountable;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsOperation;
+import org.apache.streams.core.StreamsPersistWriter;
+import org.apache.streams.core.StreamsProcessor;
+import org.apache.streams.core.StreamsProvider;
 import org.apache.streams.local.tasks.StreamsPersistWriterTask;
 import org.apache.streams.local.tasks.StreamsProcessorTask;
 import org.apache.streams.local.tasks.StreamsProviderTask;
 import org.apache.streams.local.tasks.StreamsTask;
 import org.apache.streams.util.SerializationUtil;
+
+import com.google.common.collect.Lists;
 import org.joda.time.DateTime;
 
 import java.io.Serializable;
 import java.math.BigInteger;
-import java.util.*;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 
 /**
@@ -39,263 +50,263 @@ import java.util.concurrent.BlockingQueue;
  */
 public class StreamComponent implements Serializable {
 
-    private static final int START = 1;
-    private static final int END = 2;
+  private static final int START = 1;
+  private static final int END = 2;
 
-    private String id;
-    private Set<StreamComponent> inBound;
-    private Map<StreamComponent, BlockingQueue<StreamsDatum>> outBound;
-    private BlockingQueue<StreamsDatum> inQueue;
-    private StreamsProvider provider;
-    private StreamsProcessor processor;
-    private StreamsPersistWriter writer;
-    private DateTime[] dateRange;
-    private BigInteger sequence;
-    private int numTasks = 1;
-    private boolean perpetual;
+  private String id;
+  private Set<StreamComponent> inBound;
+  private Map<StreamComponent, BlockingQueue<StreamsDatum>> outBound;
+  private BlockingQueue<StreamsDatum> inQueue;
+  private StreamsProvider provider;
+  private StreamsProcessor processor;
+  private StreamsPersistWriter writer;
+  private DateTime[] dateRange;
+  private BigInteger sequence;
+  private int numTasks = 1;
+  private boolean perpetual;
 
-    private List<StreamsTask> tasks;
+  private List<StreamsTask> tasks;
 
-    private StreamsConfiguration streamConfig;
+  private StreamsConfiguration streamConfig;
 
-    /**
-     *
-     * @param id
-     * @param provider
-     */
-    public StreamComponent(String id, StreamsProvider provider, boolean perpetual, StreamsConfiguration streamConfig) {
-        this.id = id;
-        this.provider = provider;
-        this.perpetual = perpetual;
-        this.streamConfig = streamConfig;
-        initializePrivateVariables();
-    }
+  /**
+   *
+   * @param id
+   * @param provider
+   */
+  public StreamComponent(String id, StreamsProvider provider, boolean perpetual, StreamsConfiguration streamConfig) {
+    this.id = id;
+    this.provider = provider;
+    this.perpetual = perpetual;
+    this.streamConfig = streamConfig;
+    initializePrivateVariables();
+  }
 
-    /**
-     *
-     * @param id
-     * @param provider
-     * @param start
-     * @param end
-     */
-    public StreamComponent(String id, StreamsProvider provider, DateTime start, DateTime end, StreamsConfiguration streamConfig) {
-        this.id = id;
-        this.provider = provider;
-        this.dateRange = new DateTime[2];
-        this.dateRange[START] = start;
-        this.dateRange[END] = end;
-        this.streamConfig = streamConfig;
-        initializePrivateVariables();
-    }
+  /**
+   *
+   * @param id
+   * @param provider
+   * @param start
+   * @param end
+   */
+  public StreamComponent(String id, StreamsProvider provider, DateTime start, DateTime end, StreamsConfiguration streamConfig) {
+    this.id = id;
+    this.provider = provider;
+    this.dateRange = new DateTime[2];
+    this.dateRange[START] = start;
+    this.dateRange[END] = end;
+    this.streamConfig = streamConfig;
+    initializePrivateVariables();
+  }
 
 
-    /**
-     *
-     * @param id
-     * @param provider
-     * @param sequence
-     */
-    public StreamComponent(String id, StreamsProvider provider, BigInteger sequence, StreamsConfiguration streamConfig) {
-        this.id = id;
-        this.provider = provider;
-        this.sequence = sequence;
-        this.streamConfig = streamConfig;
-    }
+  /**
+   *
+   * @param id
+   * @param provider
+   * @param sequence
+   */
+  public StreamComponent(String id, StreamsProvider provider, BigInteger sequence, StreamsConfiguration streamConfig) {
+    this.id = id;
+    this.provider = provider;
+    this.sequence = sequence;
+    this.streamConfig = streamConfig;
+  }
 
-    /**
-     *
-     * @param id
-     * @param processor
-     * @param inQueue
-     * @param numTasks
-     */
-    public StreamComponent(String id, StreamsProcessor processor, BlockingQueue<StreamsDatum> inQueue, int numTasks, StreamsConfiguration streamConfig) {
-        this.id = id;
-        this.processor = processor;
-        this.inQueue = inQueue;
-        this.numTasks = numTasks;
-        this.streamConfig = streamConfig;
-        initializePrivateVariables();
-    }
+  /**
+   *
+   * @param id
+   * @param processor
+   * @param inQueue
+   * @param numTasks
+   */
+  public StreamComponent(String id, StreamsProcessor processor, BlockingQueue<StreamsDatum> inQueue, int numTasks, StreamsConfiguration streamConfig) {
+    this.id = id;
+    this.processor = processor;
+    this.inQueue = inQueue;
+    this.numTasks = numTasks;
+    this.streamConfig = streamConfig;
+    initializePrivateVariables();
+  }
 
-    /**
-     *
-     * @param id
-     * @param writer
-     * @param inQueue
-     * @param numTasks
-     */
-    public StreamComponent(String id, StreamsPersistWriter writer, BlockingQueue<StreamsDatum> inQueue, int numTasks, StreamsConfiguration streamConfig) {
-        this.id = id;
-        this.writer = writer;
-        this.inQueue = inQueue;
-        this.numTasks = numTasks;
-        this.streamConfig = streamConfig;
-        initializePrivateVariables();
-    }
+  /**
+   *
+   * @param id
+   * @param writer
+   * @param inQueue
+   * @param numTasks
+   */
+  public StreamComponent(String id, StreamsPersistWriter writer, BlockingQueue<StreamsDatum> inQueue, int numTasks, StreamsConfiguration streamConfig) {
+    this.id = id;
+    this.writer = writer;
+    this.inQueue = inQueue;
+    this.numTasks = numTasks;
+    this.streamConfig = streamConfig;
+    initializePrivateVariables();
+  }
 
-    private void initializePrivateVariables() {
-        this.inBound = new HashSet<StreamComponent>();
-        this.outBound = new HashMap<StreamComponent, BlockingQueue<StreamsDatum>>();
-        this.tasks = Lists.newArrayList();
-    }
+  private void initializePrivateVariables() {
+    this.inBound = new HashSet<StreamComponent>();
+    this.outBound = new HashMap<StreamComponent, BlockingQueue<StreamsDatum>>();
+    this.tasks = Lists.newArrayList();
+  }
 
-    /**
-     * Add an outbound queue for this component. The queue should be an inbound queue of a downstream component.
-     * @param component the component that this supplying their inbound queue
-     * @param queue the queue to to put post processed/provided datums on
-     */
-    public void addOutBoundQueue(StreamComponent component, BlockingQueue<StreamsDatum> queue) {
-        this.outBound.put(component, queue);
-    }
+  /**
+   * Add an outbound queue for this component. The queue should be an inbound queue of a downstream component.
+   * @param component the component that is being supplied this inbound queue
+   * @param queue the queue to to put post processed/provided datums on
+   */
+  public void addOutBoundQueue(StreamComponent component, BlockingQueue<StreamsDatum> queue) {
+    this.outBound.put(component, queue);
+  }
 
-    /**
-     * Add a component that supplies data through the inbound queue.
-     * @param component that supplies data through the inbound queue
-     */
-    public void addInboundQueue(StreamComponent component) {
-        this.inBound.add(component);
-    }
+  /**
+   * Add a component that supplies data through the inbound queue.
+   * @param component that supplies data through the inbound queue
+   */
+  public void addInboundQueue(StreamComponent component) {
+    this.inBound.add(component);
+  }
 
-    /**
-     * The components that are immediately downstream of this component (aka child nodes)
-     * @return Collection of child nodes of this component
-     */
-    public Collection<StreamComponent> getDownStreamComponents() {
-        return this.outBound.keySet();
-    }
+  /**
+   * The components that are immediately downstream of this component (aka child nodes)
+   * @return Collection of child nodes of this component
+   */
+  public Collection<StreamComponent> getDownStreamComponents() {
+    return this.outBound.keySet();
+  }
 
-    /**
-     * The components that are immediately upstream of this component (aka parent nodes)
-     * @return Collection of parent nodes of this component
-     */
-    public Collection<StreamComponent> getUpStreamComponents() {
-        return this.inBound;
-    }
+  /**
+   * The components that are immediately upstream of this component (aka parent nodes)
+   * @return Collection of parent nodes of this component
+   */
+  public Collection<StreamComponent> getUpStreamComponents() {
+    return this.inBound;
+  }
 
-    /**
-     * The inbound queue for this component
-     * @return inbound queue
-     */
-    public BlockingQueue<StreamsDatum> getInBoundQueue() {
-        return this.inQueue;
-    }
+  /**
+   * The inbound queue for this component
+   * @return inbound queue
+   */
+  public BlockingQueue<StreamsDatum> getInBoundQueue() {
+    return this.inQueue;
+  }
 
-    /**
-     * The number of tasks this to run this component
-     * @return
-     */
-    public int getNumTasks() {
-        return this.numTasks;
-    }
+  /**
+   * The number of tasks used to run this component
+   * @return
+   */
+  public int getNumTasks() {
+    return this.numTasks;
+  }
 
-    /**
-     * Creates a {@link org.apache.streams.local.tasks.StreamsTask} that is running a clone of this component whose
-     * inbound and outbound queues are appropriately connected to the parent and child nodes.
-     *
-     * @return StreamsTask for this component
-     * @param timeout The timeout to use in milliseconds for any tasks that support configurable timeout
-     */
-    public StreamsTask createConnectedTask(int timeout) {
-        StreamsTask task;
-        if(this.processor != null) {
-            if(this.numTasks > 1) {
-                task =  new StreamsProcessorTask((StreamsProcessor)SerializationUtil.cloneBySerialization(this.processor), streamConfig);
-                task.addInputQueue(this.inQueue);
-                for(BlockingQueue<StreamsDatum> q : this.outBound.values()) {
-                    task.addOutputQueue(q);
-                }
-            } else {
-                task = new StreamsProcessorTask(this.processor, streamConfig);
-                task.addInputQueue(this.inQueue);
-                for(BlockingQueue<StreamsDatum> q : this.outBound.values()) {
-                    task.addOutputQueue(q);
-                }
-            }
-        }
-        else if(this.writer != null) {
-            if(this.numTasks > 1) {
-                task = new StreamsPersistWriterTask((StreamsPersistWriter) SerializationUtil.cloneBySerialization(this.writer), streamConfig);
-                task.addInputQueue(this.inQueue);
-            } else {
-                task = new StreamsPersistWriterTask(this.writer, streamConfig);
-                task.addInputQueue(this.inQueue);
-            }
-        }
-        else if(this.provider != null) {
-            StreamsProvider prov;
-            if(this.numTasks > 1) {
-                prov = (StreamsProvider)SerializationUtil.cloneBySerialization(this.provider);
-            } else {
-                prov = this.provider;
-            }
-            if(this.dateRange == null && this.sequence == null)
-                task = new StreamsProviderTask(prov, this.perpetual, streamConfig);
-            else if(this.sequence != null)
-                task = new StreamsProviderTask(prov, this.sequence, streamConfig);
-            else
-                task = new StreamsProviderTask(prov, this.dateRange[0], this.dateRange[1], streamConfig);
-            //Adjust the timeout if necessary
-            if(timeout != 0) {
-                ((StreamsProviderTask)task).setTimeout(timeout);
-            }
-            for(BlockingQueue<StreamsDatum> q : this.outBound.values()) {
-                task.addOutputQueue(q);
-            }
-        }
-        else {
-            throw new InvalidStreamException("Underlying StreamComponoent was NULL.");
+  /**
+   * Creates a {@link org.apache.streams.local.tasks.StreamsTask} that is running a clone of this component whose
+   * inbound and outbound queues are appropriately connected to the parent and child nodes.
+   *
+   * @return StreamsTask for this component
+   * @param timeout The timeout to use in milliseconds for any tasks that support configurable timeout
+   */
+  public StreamsTask createConnectedTask(int timeout) {
+    StreamsTask task;
+    if(this.processor != null) {
+      if(this.numTasks > 1) {
+        task =  new StreamsProcessorTask((StreamsProcessor)SerializationUtil.cloneBySerialization(this.processor), streamConfig);
+        task.addInputQueue(this.inQueue);
+        for(BlockingQueue<StreamsDatum> q : this.outBound.values()) {
+          task.addOutputQueue(q);
         }
-
-        if(task != null) {
-            tasks.add(task);
+      } else {
+        task = new StreamsProcessorTask(this.processor, streamConfig);
+        task.addInputQueue(this.inQueue);
+        for(BlockingQueue<StreamsDatum> q : this.outBound.values()) {
+          task.addOutputQueue(q);
         }
-
-        return task;
+      }
     }
-
-    public List<StreamsTask> getStreamsTasks() {
-        return this.tasks;
+    else if(this.writer != null) {
+      if(this.numTasks > 1) {
+        task = new StreamsPersistWriterTask((StreamsPersistWriter) SerializationUtil.cloneBySerialization(this.writer), streamConfig);
+        task.addInputQueue(this.inQueue);
+      } else {
+        task = new StreamsPersistWriterTask(this.writer, streamConfig);
+        task.addInputQueue(this.inQueue);
+      }
     }
-
-    /**
-     * The unique of this component
-     * @return
-     */
-    public String getId() {
-        return this.id;
+    else if(this.provider != null) {
+      StreamsProvider prov;
+      if(this.numTasks > 1) {
+        prov = (StreamsProvider)SerializationUtil.cloneBySerialization(this.provider);
+      } else {
+        prov = this.provider;
+      }
+      if(this.dateRange == null && this.sequence == null)
+        task = new StreamsProviderTask(prov, this.perpetual, streamConfig);
+      else if(this.sequence != null)
+        task = new StreamsProviderTask(prov, this.sequence, streamConfig);
+      else
+        task = new StreamsProviderTask(prov, this.dateRange[0], this.dateRange[1], streamConfig);
+      //Adjust the timeout if necessary
+      if(timeout != 0) {
+        ((StreamsProviderTask)task).setTimeout(timeout);
+      }
+      for(BlockingQueue<StreamsDatum> q : this.outBound.values()) {
+        task.addOutputQueue(q);
+      }
     }
-
-    @Override
-    public int hashCode() {
-        return this.id.hashCode();
+    else {
+      throw new InvalidStreamException("Underlying StreamComponoent was NULL.");
     }
 
-    @Override
-    public boolean equals(Object o) {
-        if(o instanceof StreamComponent)
-            return this.id.equals(((StreamComponent) o).id);
-        else
-            return false;
+    if(task != null) {
+      tasks.add(task);
     }
 
-    protected StreamsOperation getOperation() {
-        if(this.processor != null) {
-            return (StreamsOperation) this.processor;
-        }
-        else if(this.writer != null) {
-            return (StreamsOperation) this.writer;
-        }
-        else if(this.provider != null) {
-            return (StreamsOperation) this.provider;
-        }
-        else {
-            throw new InvalidStreamException("Underlying StreamComponoent was NULL.");
-        }
-    }
+    return task;
+  }
+
+  public List<StreamsTask> getStreamsTasks() {
+    return this.tasks;
+  }
+
+  /**
+   * The unique id of this component
+   * @return
+   */
+  public String getId() {
+    return this.id;
+  }
+
+  @Override
+  public int hashCode() {
+    return this.id.hashCode();
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if(o instanceof StreamComponent)
+      return this.id.equals(((StreamComponent) o).id);
+    else
+      return false;
+  }
 
-    @Deprecated
-    protected boolean isOperationCountable() {
-        return getOperation() instanceof DatumStatusCountable;
+  protected StreamsOperation getOperation() {
+    if(this.processor != null) {
+      return (StreamsOperation) this.processor;
     }
+    else if(this.writer != null) {
+      return (StreamsOperation) this.writer;
+    }
+    else if(this.provider != null) {
+      return (StreamsOperation) this.provider;
+    }
+    else {
+      throw new InvalidStreamException("Underlying StreamComponoent was NULL.");
+    }
+  }
+
+  @Deprecated
+  protected boolean isOperationCountable() {
+    return getOperation() instanceof DatumStatusCountable;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/DatumStatusCounter.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/DatumStatusCounter.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/DatumStatusCounter.java
index 34d2bcc..e2884d0 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/DatumStatusCounter.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/DatumStatusCounter.java
@@ -17,14 +17,13 @@
  */
 package org.apache.streams.local.counters;
 
-import net.jcip.annotations.ThreadSafe;
 import org.apache.streams.local.builders.LocalStreamBuilder;
 import org.apache.streams.util.ComponentUtils;
+
+import net.jcip.annotations.ThreadSafe;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.management.*;
-import java.lang.management.ManagementFactory;
 import java.util.concurrent.atomic.AtomicLong;
 
 /**
@@ -33,56 +32,56 @@ import java.util.concurrent.atomic.AtomicLong;
 @ThreadSafe
 public class DatumStatusCounter implements DatumStatusCounterMXBean{
 
-    public static final String NAME_TEMPLATE = "org.apache.streams.local:type=DatumCounter,name=%s,identifier=%s,startedAt=%s";
-    private static final Logger LOGGER = LoggerFactory.getLogger(DatumStatusCounter.class);
+  public static final String NAME_TEMPLATE = "org.apache.streams.local:type=DatumCounter,name=%s,identifier=%s,startedAt=%s";
+  private static final Logger LOGGER = LoggerFactory.getLogger(DatumStatusCounter.class);
 
-    private AtomicLong failed;
-    private AtomicLong passed;
+  private AtomicLong failed;
+  private AtomicLong passed;
 
-    public DatumStatusCounter(String id) {
-        this(id, LocalStreamBuilder.DEFAULT_STREAM_IDENTIFIER, -1);
-    }
+  public DatumStatusCounter(String id) {
+    this(id, LocalStreamBuilder.DEFAULT_STREAM_IDENTIFIER, -1);
+  }
 
-    public DatumStatusCounter(String id, String streamIdentifier, long startedAt) {
-        this.failed = new AtomicLong(0);
-        this.passed = new AtomicLong(0);
-        ComponentUtils.registerLocalMBean(String.format(NAME_TEMPLATE, id, streamIdentifier, startedAt), this);
-    }
+  public DatumStatusCounter(String id, String streamIdentifier, long startedAt) {
+    this.failed = new AtomicLong(0);
+    this.passed = new AtomicLong(0);
+    ComponentUtils.registerLocalMBean(String.format(NAME_TEMPLATE, id, streamIdentifier, startedAt), this);
+  }
 
-    public void incrementFailedCount() {
-        this.incrementFailedCount(1);
-    }
+  public void incrementFailedCount() {
+    this.incrementFailedCount(1);
+  }
 
-    public void incrementFailedCount(long delta) {
-        this.failed.addAndGet(delta);
-    }
+  public void incrementFailedCount(long delta) {
+    this.failed.addAndGet(delta);
+  }
 
-    public void incrementPassedCount() {
-        this.incrementPassedCount(1);
-    }
+  public void incrementPassedCount() {
+    this.incrementPassedCount(1);
+  }
 
-    public void incrementPassedCount(long delta) {
-        this.passed.addAndGet(delta);
-    }
+  public void incrementPassedCount(long delta) {
+    this.passed.addAndGet(delta);
+  }
 
 
-    @Override
-    public double getFailRate() {
-        double failed = this.failed.get();
-        double passed = this.passed.get();
-        if(failed == 0.0 && passed == 0) {
-            return 0.0;
-        }
-        return failed / (passed + failed);
+  @Override
+  public double getFailRate() {
+    double failed = this.failed.get();
+    double passed = this.passed.get();
+    if(failed == 0.0 && passed == 0) {
+      return 0.0;
     }
+    return failed / (passed + failed);
+  }
 
-    @Override
-    public long getNumFailed() {
-        return this.failed.get();
-    }
+  @Override
+  public long getNumFailed() {
+    return this.failed.get();
+  }
 
-    @Override
-    public long getNumPassed() {
-        return this.passed.get();
-    }
+  @Override
+  public long getNumPassed() {
+    return this.passed.get();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/DatumStatusCounterMXBean.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/DatumStatusCounterMXBean.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/DatumStatusCounterMXBean.java
index 7cc8df4..3a318a8 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/DatumStatusCounterMXBean.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/DatumStatusCounterMXBean.java
@@ -22,22 +22,22 @@ package org.apache.streams.local.counters;
  */
 public interface DatumStatusCounterMXBean {
 
-    /**
-     * Get number of failed datums
-     * @return number of failed datums
-     */
-    public long getNumFailed();
+  /**
+   * Get number of failed datums
+   * @return number of failed datums
+   */
+  public long getNumFailed();
 
-    /**
-     * Get number of passed datums
-     * @return number of passed datums
-     */
-    public long getNumPassed();
+  /**
+   * Get number of passed datums
+   * @return number of passed datums
+   */
+  public long getNumPassed();
 
-    /**
-     * Get the failure rate.  Calculated by num failed divided by (num passed + num failed)
-     * @return the failure rate
-     */
-    public double getFailRate();
+  /**
+   * Get the failure rate.  Calculated by num failed divided by (num passed + num failed)
+   * @return the failure rate
+   */
+  public double getFailRate();
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/StreamsTaskCounter.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/StreamsTaskCounter.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/StreamsTaskCounter.java
index 9bd5d49..de37f1b 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/StreamsTaskCounter.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/StreamsTaskCounter.java
@@ -17,16 +17,14 @@
  */
 package org.apache.streams.local.counters;
 
-import net.jcip.annotations.GuardedBy;
-import net.jcip.annotations.ThreadSafe;
 import org.apache.streams.local.builders.LocalStreamBuilder;
 import org.apache.streams.util.ComponentUtils;
-import org.joda.time.DateTime;
+
+import net.jcip.annotations.GuardedBy;
+import net.jcip.annotations.ThreadSafe;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.management.*;
-import java.lang.management.ManagementFactory;
 import java.util.concurrent.atomic.AtomicLong;
 
 /**
@@ -35,133 +33,133 @@ import java.util.concurrent.atomic.AtomicLong;
 @ThreadSafe
 public class StreamsTaskCounter implements StreamsTaskCounterMXBean {
 
-    public static final String NAME_TEMPLATE = "org.apache.streams.local:type=StreamsTaskCounter,name=%s,identifier=%s,startedAt=%s";
-    private static final Logger LOGGER = LoggerFactory.getLogger(StreamsTaskCounter.class);
-
-    private AtomicLong emitted;
-    private AtomicLong received;
-    private AtomicLong errors;
-    private AtomicLong totalTime;
-    @GuardedBy("this")
-    private volatile long maxTime;
-
-    /**
-     *
-     * @param id
-     */
-    public StreamsTaskCounter(String id) {
-        this(id, LocalStreamBuilder.DEFAULT_STREAM_IDENTIFIER, -1);
-    }
-
-    /**
-     *
-     * @param id
-     */
-    public StreamsTaskCounter(String id, String streamId, long startedAt) {
-        this.emitted = new AtomicLong(0);
-        this.received = new AtomicLong(0);
-        this.errors = new AtomicLong(0);
-        this.totalTime = new AtomicLong(0);
-        this.maxTime = -1;
-        ComponentUtils.registerLocalMBean(String.format(NAME_TEMPLATE, id, streamId, startedAt), this);
+  public static final String NAME_TEMPLATE = "org.apache.streams.local:type=StreamsTaskCounter,name=%s,identifier=%s,startedAt=%s";
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsTaskCounter.class);
+
+  private AtomicLong emitted;
+  private AtomicLong received;
+  private AtomicLong errors;
+  private AtomicLong totalTime;
+  @GuardedBy("this")
+  private volatile long maxTime;
+
+  /**
+   *
+   * @param id
+   */
+  public StreamsTaskCounter(String id) {
+    this(id, LocalStreamBuilder.DEFAULT_STREAM_IDENTIFIER, -1);
+  }
+
+  /**
+   *
+   * @param id
+   */
+  public StreamsTaskCounter(String id, String streamId, long startedAt) {
+    this.emitted = new AtomicLong(0);
+    this.received = new AtomicLong(0);
+    this.errors = new AtomicLong(0);
+    this.totalTime = new AtomicLong(0);
+    this.maxTime = -1;
+    ComponentUtils.registerLocalMBean(String.format(NAME_TEMPLATE, id, streamId, startedAt), this);
+  }
+
+  /**
+   * Increment emitted count
+   */
+  public void incrementEmittedCount() {
+    this.incrementEmittedCount(1);
+  }
+
+  /**
+   * Increment emitted count
+   * @param delta
+   */
+  public void incrementEmittedCount(long delta) {
+    this.emitted.addAndGet(delta);
+  }
+
+  /**
+   * Increment error count
+   */
+  public void incrementErrorCount() {
+    this.incrementErrorCount(1);
+  }
+
+  /**
+   * Increment error count
+   * @param delta
+   */
+  public void incrementErrorCount(long delta) {
+    this.errors.addAndGet(delta);
+  }
+
+  /**
+   * Increment received count
+   */
+  public void incrementReceivedCount() {
+    this.incrementReceivedCount(1);
+  }
+
+  /**
+   * Increment received count
+   * @param delta
+   */
+  public void incrementReceivedCount(long delta) {
+    this.received.addAndGet(delta);
+  }
+
+  /**
+   * Add the time it takes to process a single datum in milliseconds
+   * @param processTime
+   */
+  public void addTime(long processTime) {
+    synchronized (this) {
+      if(processTime > this.maxTime) {
+        this.maxTime = processTime;
+      }
     }
+    this.totalTime.addAndGet(processTime);
+  }
 
-    /**
-     * Increment emitted count
-     */
-    public void incrementEmittedCount() {
-        this.incrementEmittedCount(1);
+  @Override
+  public double getErrorRate() {
+    if(this.received.get() == 0) {
+      return 0.0;
     }
-
-    /**
-     * Increment emitted count
-     * @param delta
-     */
-    public void incrementEmittedCount(long delta) {
-        this.emitted.addAndGet(delta);
+    return (double) this.errors.get() / (double) this.received.get();
+  }
+
+  @Override
+  public long getNumEmitted() {
+    return this.emitted.get();
+  }
+
+  @Override
+  public long getNumReceived() {
+    return this.received.get();
+  }
+
+  @Override
+  public long getNumUnhandledErrors() {
+    return this.errors.get();
+  }
+
+  @Override
+  public double getAvgTime() {
+    long rec = this.received.get();
+    long emit = this.emitted.get();
+    if(rec == 0 && emit == 0 ) {
+      return 0.0;
+    } else if( rec == 0) { //provider instance
+      return this.totalTime.get() / (double) emit;
+    } else {
+      return this.totalTime.get() / ((double) this.received.get() - this.errors.get());
     }
+  }
 
-    /**
-     * Increment error count
-     */
-    public void incrementErrorCount() {
-        this.incrementErrorCount(1);
-    }
-
-    /**
-     * Increment error count
-     * @param delta
-     */
-    public void incrementErrorCount(long delta) {
-        this.errors.addAndGet(delta);
-    }
-
-    /**
-     * Increment received count
-     */
-    public void incrementReceivedCount() {
-        this.incrementReceivedCount(1);
-    }
-
-    /**
-     * Increment received count
-     * @param delta
-     */
-    public void incrementReceivedCount(long delta) {
-        this.received.addAndGet(delta);
-    }
-
-    /**
-     * Add the time it takes to process a single datum in milliseconds
-     * @param processTime
-     */
-    public void addTime(long processTime) {
-        synchronized (this) {
-            if(processTime > this.maxTime) {
-                this.maxTime = processTime;
-            }
-        }
-        this.totalTime.addAndGet(processTime);
-    }
-
-    @Override
-    public double getErrorRate() {
-        if(this.received.get() == 0) {
-            return 0.0;
-        }
-        return (double) this.errors.get() / (double) this.received.get();
-    }
-
-    @Override
-    public long getNumEmitted() {
-        return this.emitted.get();
-    }
-
-    @Override
-    public long getNumReceived() {
-        return this.received.get();
-    }
-
-    @Override
-    public long getNumUnhandledErrors() {
-        return this.errors.get();
-    }
-
-    @Override
-    public double getAvgTime() {
-        long rec = this.received.get();
-        long emit = this.emitted.get();
-        if(rec == 0 && emit == 0 ) {
-            return 0.0;
-        } else if( rec == 0) { //provider instance
-            return this.totalTime.get() / (double) emit;
-        } else {
-            return this.totalTime.get() / ((double) this.received.get() - this.errors.get());
-        }
-    }
-
-    @Override
-    public long getMaxTime() {
-        return this.maxTime;
-    }
+  @Override
+  public long getMaxTime() {
+    return this.maxTime;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/StreamsTaskCounterMXBean.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/StreamsTaskCounterMXBean.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/StreamsTaskCounterMXBean.java
index 8ac2e33..062eb04 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/StreamsTaskCounterMXBean.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/counters/StreamsTaskCounterMXBean.java
@@ -22,42 +22,42 @@ package org.apache.streams.local.counters;
  */
 public interface StreamsTaskCounterMXBean {
 
-    /**
-     * Get the error rate of the streams process calculated by the number of errors not handled by the {@link org.apache.streams.local.tasks.StreamsTask}
-     * divided by the number of datums received.
-     * @return error rate
-     */
-    public double getErrorRate();
-
-    /**
-     * Get the number of {@link org.apache.streams.core.StreamsDatum}s emitted by the streams process
-     * @return number of emitted datums
-     */
-    public long getNumEmitted();
-
-    /**
-     * Get the number of {@link org.apache.streams.core.StreamsDatum}s received by the streams process
-     * @return number of received datums
-     */
-    public long getNumReceived();
-
-    /**
-     * Get the number of errors that the process had to catch because the executing Provider/Processor/Writer did not
-     * catch and handle the exception
-     * @return number of handled errors
-     */
-    public long getNumUnhandledErrors();
-
-    /**
-     * Returns the average time in milliseconds it takes the task to readCurrent, process, or write to return.
-     * @return
-     */
-    public double getAvgTime();
-
-    /**
-     * Returns the max time in milliseconds it takes the task to readCurrent, process, or write to return.
-     * @return
-     */
-    public long getMaxTime();
+  /**
+   * Get the error rate of the streams process calculated by the number of errors not handled by the {@link org.apache.streams.local.tasks.StreamsTask}
+   * divided by the number of datums received.
+   * @return error rate
+   */
+  public double getErrorRate();
+
+  /**
+   * Get the number of {@link org.apache.streams.core.StreamsDatum}s emitted by the streams process
+   * @return number of emitted datums
+   */
+  public long getNumEmitted();
+
+  /**
+   * Get the number of {@link org.apache.streams.core.StreamsDatum}s received by the streams process
+   * @return number of received datums
+   */
+  public long getNumReceived();
+
+  /**
+   * Get the number of errors that the process had to catch because the executing Provider/Processor/Writer did not
+   * catch and handle the exception
+   * @return number of handled errors
+   */
+  public long getNumUnhandledErrors();
+
+  /**
+   * Returns the average time in milliseconds it takes the task to readCurrent, process, or write to return.
+   * @return
+   */
+  public double getAvgTime();
+
+  /**
+   * Returns the max time in milliseconds it takes the task to readCurrent, process, or write to return.
+   * @return
+   */
+  public long getMaxTime();
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/executors/ShutdownStreamOnUnhandleThrowableThreadPoolExecutor.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/executors/ShutdownStreamOnUnhandleThrowableThreadPoolExecutor.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/executors/ShutdownStreamOnUnhandleThrowableThreadPoolExecutor.java
index c9cfec4..38bda24 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/executors/ShutdownStreamOnUnhandleThrowableThreadPoolExecutor.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/executors/ShutdownStreamOnUnhandleThrowableThreadPoolExecutor.java
@@ -19,10 +19,13 @@
 package org.apache.streams.local.executors;
 
 import org.apache.streams.local.builders.LocalStreamBuilder;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.concurrent.*;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
 
 /**
  * A fixed ThreadPoolExecutor that will shutdown a stream upon a thread ending execution due to an unhandled throwable.
@@ -30,35 +33,35 @@ import java.util.concurrent.*;
  */
 public class ShutdownStreamOnUnhandleThrowableThreadPoolExecutor extends ThreadPoolExecutor {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(ShutdownStreamOnUnhandleThrowableThreadPoolExecutor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(ShutdownStreamOnUnhandleThrowableThreadPoolExecutor.class);
 
-    private LocalStreamBuilder streamBuilder;
-    private volatile boolean isStoped;
+  private LocalStreamBuilder streamBuilder;
+  private volatile boolean isStoped;
 
-    /**
-     * Creates a fixed size thread pool where corePoolSize & maximumPoolSize equal numThreads with an unbounded queue.
-     * @param numThreads number of threads in pool
-     * @param streamBuilder streambuilder to call {@link org.apache.streams.core.StreamBuilder#stop()} on upon receiving an unhandled throwable
-     */
-    public ShutdownStreamOnUnhandleThrowableThreadPoolExecutor(int numThreads, LocalStreamBuilder streamBuilder) {
-        super(numThreads, numThreads, 1, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
-        this.streamBuilder = streamBuilder;
-        this.isStoped = false;
-    }
+  /**
+   * Creates a fixed size thread pool where corePoolSize & maximumPoolSize equal numThreads with an unbounded queue.
+   * @param numThreads number of threads in pool
+   * @param streamBuilder streamBuilder on which to call {@link org.apache.streams.core.StreamBuilder#stop()} upon receiving an unhandled throwable
+   */
+  public ShutdownStreamOnUnhandleThrowableThreadPoolExecutor(int numThreads, LocalStreamBuilder streamBuilder) {
+    super(numThreads, numThreads, 1, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
+    this.streamBuilder = streamBuilder;
+    this.isStoped = false;
+  }
 
-    @Override
-    protected void afterExecute(Runnable r, Throwable t) {
-        if(t != null) {
-            LOGGER.error("Runnable, {}, exited with an unhandled throwable! : {}", r.getClass(), t);
-            LOGGER.error("Attempting to shut down stream.");
-            synchronized (this) {
-                if (!this.isStoped) {
-                    this.isStoped = true;
-                    this.streamBuilder.stop();
-                }
-            }
-        } else {
-            LOGGER.trace("Runnable, {}, finished executing.", r.getClass());
+  @Override
+  protected void afterExecute(Runnable r, Throwable t) {
+    if(t != null) {
+      LOGGER.error("Runnable, {}, exited with an unhandled throwable! : {}", r.getClass(), t);
+      LOGGER.error("Attempting to shut down stream.");
+      synchronized (this) {
+        if (!this.isStoped) {
+          this.isStoped = true;
+          this.streamBuilder.stop();
         }
+      }
+    } else {
+      LOGGER.trace("Runnable, {}, finished executing.", r.getClass());
     }
+  }
 }


[28/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusActivityDeserializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusActivityDeserializer.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusActivityDeserializer.java
index 7ff1d1e..79f1815 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusActivityDeserializer.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusActivityDeserializer.java
@@ -27,6 +27,7 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.google.api.client.util.DateTime;
 import com.google.api.client.util.Lists;
 import com.google.api.services.plus.model.Activity;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -34,130 +35,137 @@ import java.io.IOException;
 import java.util.List;
 
 /**
- * Custom deserializer for GooglePlus' Person model
+ * Custom deserializer for GooglePlus' Activity model.
  */
 public class GPlusActivityDeserializer extends JsonDeserializer<Activity> {
-    private final static Logger LOGGER = LoggerFactory.getLogger(GPlusActivityDeserializer.class);
-
-    /**
-     * Because the GooglePlus Activity object {@link com.google.api.services.plus.model.Activity} contains complex objects
-     * within its hierarchy, we have to use a custom deserializer
-     *
-     * @param jsonParser
-     * @param deserializationContext
-     * @return The deserialized {@link com.google.api.services.plus.model.Activity} object
-     * @throws IOException
-     * @throws JsonProcessingException
-     */
-    @Override
-    public Activity deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
-
-        JsonNode node = jsonParser.getCodec().readTree(jsonParser);
-        Activity activity = new Activity();
-
-        try {
-            activity.setUrl(node.get("url").asText());
-            activity.setEtag(node.get("etag").asText());
-            activity.setTitle(node.get("title").asText());
-            activity.setPublished(DateTime.parseRfc3339(node.get("published").asText()));
-            activity.setUpdated(DateTime.parseRfc3339(node.get("updated").asText()));
-            activity.setId(node.get("id").asText());
-            activity.setVerb(node.get("verb").asText());
-
-            activity.setActor(buildActor(node));
-
-            activity.setObject(buildPlusObject(node));
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to deserialize activity object: {}", e);
-        }
 
-        return activity;
+  private static final Logger LOGGER = LoggerFactory.getLogger(GPlusActivityDeserializer.class);
+
+  /**
+   * Because the GooglePlus Activity object {@link com.google.api.services.plus.model.Activity} contains complex objects
+   * within its hierarchy, we have to use a custom deserializer
+   *
+   * @param jsonParser jsonParser
+   * @param deserializationContext deserializationContext
+   * @return The deserialized {@link com.google.api.services.plus.model.Activity} object
+   * @throws IOException IOException
+   * @throws JsonProcessingException JsonProcessingException
+   */
+  @Override
+  public Activity deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
+
+    JsonNode node = jsonParser.getCodec().readTree(jsonParser);
+    Activity activity = new Activity();
+
+    try {
+      activity.setUrl(node.get("url").asText());
+      activity.setEtag(node.get("etag").asText());
+      activity.setTitle(node.get("title").asText());
+      activity.setPublished(DateTime.parseRfc3339(node.get("published").asText()));
+      activity.setUpdated(DateTime.parseRfc3339(node.get("updated").asText()));
+      activity.setId(node.get("id").asText());
+      activity.setVerb(node.get("verb").asText());
+
+      activity.setActor(buildActor(node));
+
+      activity.setObject(buildPlusObject(node));
+    } catch (Exception ex) {
+      LOGGER.error("Exception while trying to deserialize activity object: {}", ex);
     }
 
-    /**
-     * Given a raw JsonNode, build out the G+ {@link com.google.api.services.plus.model.Activity.Actor} object
-     *
-     * @param node
-     * @return {@link com.google.api.services.plus.model.Activity.Actor} object
-     */
-    private Activity.Actor buildActor(JsonNode node) {
-        Activity.Actor actor = new Activity.Actor();
-        JsonNode actorNode = node.get("actor");
-
-        actor.setId(actorNode.get("id").asText());
-        actor.setDisplayName(actorNode.get("displayName").asText());
-        actor.setUrl(actorNode.get("url").asText());
-
-        Activity.Actor.Image image = new Activity.Actor.Image();
-        JsonNode imageNode = actorNode.get("image");
-        image.setUrl(imageNode.get("url").asText());
-
-        actor.setImage(image);
-
-        return actor;
-    }
+    return activity;
+  }
+
+  /**
+   * Given a raw JsonNode, build out the G+ {@link com.google.api.services.plus.model.Activity.Actor} object
+   *
+   * @param node node
+   * @return {@link com.google.api.services.plus.model.Activity.Actor} object
+   */
+  private Activity.Actor buildActor(JsonNode node) {
+    Activity.Actor actor = new Activity.Actor();
+    JsonNode actorNode = node.get("actor");
+
+    actor.setId(actorNode.get("id").asText());
+    actor.setDisplayName(actorNode.get("displayName").asText());
+    actor.setUrl(actorNode.get("url").asText());
+
+    Activity.Actor.Image image = new Activity.Actor.Image();
+    JsonNode imageNode = actorNode.get("image");
+    image.setUrl(imageNode.get("url").asText());
+
+    actor.setImage(image);
+
+    return actor;
+  }
+
+  /**
+   * Given a JsonNode, build out all aspects of the {@link com.google.api.services.plus.model.Activity.PlusObject} object
+   *
+   * @param node node
+   * @return {@link com.google.api.services.plus.model.Activity.PlusObject} object
+   */
+  private Activity.PlusObject buildPlusObject(JsonNode node) {
+    Activity.PlusObject object = new Activity.PlusObject();
+    JsonNode objectNode = node.get("object");
+    object.setObjectType(objectNode.get("objectType").asText());
+    object.setContent(objectNode.get("content").asText());
+    object.setUrl(objectNode.get("url").asText());
+
+    Activity.PlusObject.Replies replies = new Activity.PlusObject.Replies();
+    JsonNode repliesNode = objectNode.get("replies");
+    replies.setTotalItems(repliesNode.get("totalItems").asLong());
+    replies.setSelfLink(repliesNode.get("selfLink").asText());
+    object.setReplies(replies);
+
+    Activity.PlusObject.Plusoners plusoners = new Activity.PlusObject.Plusoners();
+    JsonNode plusonersNode = objectNode.get("plusoners");
+    plusoners.setTotalItems(plusonersNode.get("totalItems").asLong());
+    plusoners.setSelfLink(plusonersNode.get("selfLink").asText());
+    object.setPlusoners(plusoners);
+
+    Activity.PlusObject.Resharers resharers = new Activity.PlusObject.Resharers();
+    JsonNode resharersNode = objectNode.get("resharers");
+    resharers.setTotalItems(resharersNode.get("totalItems").asLong());
+    resharers.setSelfLink(resharersNode.get("selfLink").asText());
+    object.setResharers(resharers);
+
+    object.setAttachments(buildAttachments(objectNode));//attachments);
+
+    return object;
+  }
+
+  /**
+   * Given a raw JsonNode representation of an Activity's attachments, build out that
+   * list of {@link com.google.api.services.plus.model.Activity.PlusObject.Attachments} objects
+   *
+   * @param objectNode objectNode
+   * @return list of {@link com.google.api.services.plus.model.Activity.PlusObject.Attachments} objects
+   */
+  private List<Activity.PlusObject.Attachments> buildAttachments(JsonNode objectNode) {
+    List<Activity.PlusObject.Attachments> attachments = Lists.newArrayList();
+    if ( objectNode.has("attachments") ) {
+      for (JsonNode attachmentNode : objectNode.get("attachments")) {
+        Activity.PlusObject.Attachments attachments1 = new Activity.PlusObject.Attachments();
+        attachments1.setObjectType(attachmentNode.get("objectType").asText());
+        if (attachmentNode.has("displayName")) {
+          attachments1.setDisplayName(attachmentNode.get("displayName").asText());
+        }
+        if (attachmentNode.has("content")) {
+          attachments1.setContent(attachmentNode.get("content").asText());
+        }
+        if (attachmentNode.has("url")) {
+          attachments1.setUrl(attachmentNode.get("url").asText());
+        }
 
-    /**
-     * Given a JsonNode, build out all aspects of the {@link com.google.api.services.plus.model.Activity.PlusObject} object
-     *
-     * @param node
-     * @return {@link com.google.api.services.plus.model.Activity.PlusObject} object
-     */
-    private Activity.PlusObject buildPlusObject(JsonNode node) {
-        Activity.PlusObject object = new Activity.PlusObject();
-        JsonNode objectNode = node.get("object");
-        object.setObjectType(objectNode.get("objectType").asText());
-        object.setContent(objectNode.get("content").asText());
-        object.setUrl(objectNode.get("url").asText());
-
-        Activity.PlusObject.Replies replies = new Activity.PlusObject.Replies();
-        JsonNode repliesNode = objectNode.get("replies");
-        replies.setTotalItems(repliesNode.get("totalItems").asLong());
-        replies.setSelfLink(repliesNode.get("selfLink").asText());
-        object.setReplies(replies);
-
-        Activity.PlusObject.Plusoners plusoners = new Activity.PlusObject.Plusoners();
-        JsonNode plusonersNode = objectNode.get("plusoners");
-        plusoners.setTotalItems(plusonersNode.get("totalItems").asLong());
-        plusoners.setSelfLink(plusonersNode.get("selfLink").asText());
-        object.setPlusoners(plusoners);
-
-        Activity.PlusObject.Resharers resharers = new Activity.PlusObject.Resharers();
-        JsonNode resharersNode = objectNode.get("resharers");
-        resharers.setTotalItems(resharersNode.get("totalItems").asLong());
-        resharers.setSelfLink(resharersNode.get("selfLink").asText());
-        object.setResharers(resharers);
-
-        object.setAttachments(buildAttachments(objectNode));//attachments);
-
-        return object;
-    }
+        Activity.PlusObject.Attachments.Image image1 = new Activity.PlusObject.Attachments.Image();
+        JsonNode imageNode1 = attachmentNode.get("image");
+        image1.setUrl(imageNode1.get("url").asText());
+        attachments1.setImage(image1);
 
-    /**
-     * Given a raw JsonNode representation of an Activity's attachments, build out that
-     * list of {@link com.google.api.services.plus.model.Activity.PlusObject.Attachments} objects
-     *
-     * @param objectNode
-     * @return list of {@link com.google.api.services.plus.model.Activity.PlusObject.Attachments} objects
-     */
-    private List<Activity.PlusObject.Attachments> buildAttachments(JsonNode objectNode) {
-        List<Activity.PlusObject.Attachments> attachments = Lists.newArrayList();
-        if( objectNode.has("attachments") )
-            for (JsonNode attachmentNode : objectNode.get("attachments")) {
-                Activity.PlusObject.Attachments attachments1 = new Activity.PlusObject.Attachments();
-                attachments1.setObjectType(attachmentNode.get("objectType").asText());
-                if( attachmentNode.has("displayName")) attachments1.setDisplayName(attachmentNode.get("displayName").asText());
-                if( attachmentNode.has("content")) attachments1.setContent(attachmentNode.get("content").asText());
-                if( attachmentNode.has("url")) attachments1.setUrl(attachmentNode.get("url").asText());
-
-                Activity.PlusObject.Attachments.Image image1 = new Activity.PlusObject.Attachments.Image();
-                JsonNode imageNode1 = attachmentNode.get("image");
-                image1.setUrl(imageNode1.get("url").asText());
-                attachments1.setImage(image1);
-
-                attachments.add(attachments1);
-            }
-
-        return attachments;
+        attachments.add(attachments1);
+      }
     }
+    return attachments;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusCommentDeserializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusCommentDeserializer.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusCommentDeserializer.java
index 956309a..d143419 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusCommentDeserializer.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusCommentDeserializer.java
@@ -19,6 +19,8 @@
 
 package com.google.gplus.serializer.util;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.DeserializationContext;
@@ -28,71 +30,76 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.api.client.util.DateTime;
 import com.google.api.client.util.Lists;
 import com.google.api.services.plus.model.Comment;
-import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.List;
 
+/**
+ * GPlusCommentDeserializer converts gplus comments to as1 comments.
+ */
 public class GPlusCommentDeserializer  extends JsonDeserializer<Comment> {
-    private final static Logger LOGGER = LoggerFactory.getLogger(GPlusActivityDeserializer.class);
-
-    /**
-     * Because the GooglePlus Comment object {@link com.google.api.services.plus.model.Comment} contains complex objects
-     * within its hierarchy, we have to use a custom deserializer
-     *
-     * @param jsonParser
-     * @param deserializationContext
-     * @return The deserialized {@link com.google.api.services.plus.model.Comment} object
-     * @throws java.io.IOException
-     * @throws com.fasterxml.jackson.core.JsonProcessingException
-     */
-    @Override
-    public Comment deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
-
-        JsonNode node = jsonParser.getCodec().readTree(jsonParser);
-        ObjectMapper objectMapper = StreamsJacksonMapper.getInstance();
-        Comment comment = new Comment();
-
-        try {
-            comment.setEtag(node.get("etag").asText());
-            comment.setVerb(node.get("verb").asText());
-            comment.setId(node.get("id").asText());
-            comment.setPublished(DateTime.parseRfc3339(node.get("published").asText()));
-            comment.setUpdated(DateTime.parseRfc3339(node.get("updated").asText()));
-
-            Comment.Actor actor = new Comment.Actor();
-            JsonNode actorNode = node.get("actor");
-            actor.setDisplayName(actorNode.get("displayName").asText());
-            actor.setUrl(actorNode.get("url").asText());
-
-            Comment.Actor.Image image = new Comment.Actor.Image();
-            JsonNode imageNode = actorNode.get("image");
-            image.setUrl(imageNode.get("url").asText());
-
-            actor.setImage(image);
-
-            comment.setObject(objectMapper.readValue(objectMapper.writeValueAsString(node.get("object")), Comment.PlusObject.class));
-
-            comment.setSelfLink(node.get("selfLink").asText());
-
-            List<Comment.InReplyTo> replies = Lists.newArrayList();
-            for(JsonNode reply : node.get("inReplyTo")) {
-                Comment.InReplyTo r = objectMapper.readValue(objectMapper.writeValueAsString(reply), Comment.InReplyTo.class);
-                replies.add(r);
-            }
-
-            comment.setInReplyTo(replies);
-
-            Comment.Plusoners plusoners = new Comment.Plusoners();
-            JsonNode plusonersNode = node.get("plusoners");
-            plusoners.setTotalItems(plusonersNode.get("totalItems").asLong());
-            comment.setPlusoners(plusoners);
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to deserialize activity object: {}", e);
-        }
-
-        return comment;
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(GPlusActivityDeserializer.class);
+
+  /**
+   * Because the GooglePlus Comment object {@link com.google.api.services.plus.model.Comment} contains complex objects
+   * within its hierarchy, we have to use a custom deserializer
+   *
+   * @param jsonParser jsonParser
+   * @param deserializationContext deserializationContext
+   * @return The deserialized {@link com.google.api.services.plus.model.Comment} object
+   * @throws java.io.IOException IOException
+   * @throws com.fasterxml.jackson.core.JsonProcessingException JsonProcessingException
+   */
+  @Override
+  public Comment deserialize(JsonParser jsonParser, DeserializationContext deserializationContext)
+      throws IOException, JsonProcessingException {
+
+    JsonNode node = jsonParser.getCodec().readTree(jsonParser);
+    ObjectMapper objectMapper = StreamsJacksonMapper.getInstance();
+    Comment comment = new Comment();
+
+    try {
+      comment.setEtag(node.get("etag").asText());
+      comment.setVerb(node.get("verb").asText());
+      comment.setId(node.get("id").asText());
+      comment.setPublished(DateTime.parseRfc3339(node.get("published").asText()));
+      comment.setUpdated(DateTime.parseRfc3339(node.get("updated").asText()));
+
+      Comment.Actor actor = new Comment.Actor();
+      JsonNode actorNode = node.get("actor");
+      actor.setDisplayName(actorNode.get("displayName").asText());
+      actor.setUrl(actorNode.get("url").asText());
+
+      Comment.Actor.Image image = new Comment.Actor.Image();
+      JsonNode imageNode = actorNode.get("image");
+      image.setUrl(imageNode.get("url").asText());
+
+      actor.setImage(image);
+
+      comment.setObject(objectMapper.readValue(objectMapper.writeValueAsString(node.get("object")), Comment.PlusObject.class));
+
+      comment.setSelfLink(node.get("selfLink").asText());
+
+      List<Comment.InReplyTo> replies = Lists.newArrayList();
+      for (JsonNode reply : node.get("inReplyTo")) {
+        Comment.InReplyTo irt = objectMapper.readValue(objectMapper.writeValueAsString(reply), Comment.InReplyTo.class);
+        replies.add(irt);
+      }
+
+      comment.setInReplyTo(replies);
+
+      Comment.Plusoners plusoners = new Comment.Plusoners();
+      JsonNode plusonersNode = node.get("plusoners");
+      plusoners.setTotalItems(plusonersNode.get("totalItems").asLong());
+      comment.setPlusoners(plusoners);
+    } catch (Exception ex) {
+      LOGGER.error("Exception while trying to deserialize activity object: {}", ex);
     }
+
+    return comment;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusEventClassifier.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusEventClassifier.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusEventClassifier.java
index 3dfac33..6e04dc9 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusEventClassifier.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusEventClassifier.java
@@ -18,40 +18,50 @@
 
 package com.google.gplus.serializer.util;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.api.services.plus.model.Activity;
 import com.google.api.services.plus.model.Person;
 import com.google.common.base.Preconditions;
 import org.apache.commons.lang.StringUtils;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 
 import java.io.IOException;
 import java.io.Serializable;
 
+/**
+ * GPlusEventClassifier classifies GPlus Events.
+ */
 public class GPlusEventClassifier implements Serializable {
-    private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-    private static final String ACTIVITY_IDENTIFIER = "\"plus#activity\"";
-    private static final String PERSON_IDENTIFIER = "\"plus#person\"";
-
-    public static Class detectClass(String json) {
-        Preconditions.checkNotNull(json);
-        Preconditions.checkArgument(StringUtils.isNotEmpty(json));
-
-        ObjectNode objectNode;
-        try {
-            objectNode = (ObjectNode) mapper.readTree(json);
-        } catch (IOException e) {
-            e.printStackTrace();
-            return null;
-        }
-
-        if (objectNode.findValue("kind") != null && objectNode.get("kind").toString().equals(ACTIVITY_IDENTIFIER)) {
-            return Activity.class;
-        } else if(objectNode.findValue("kind") != null && objectNode.get("kind").toString().equals(PERSON_IDENTIFIER)) {
-            return Person.class;
-        } else  {
-            return ObjectNode.class;
-        }
+
+  private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static final String ACTIVITY_IDENTIFIER = "\"plus#activity\"";
+  private static final String PERSON_IDENTIFIER = "\"plus#person\"";
+
+  /**
+   * Detect likely class of String json.
+   * @param json String json
+   * @return likely class
+   */
+  public static Class detectClass(String json) {
+    Preconditions.checkNotNull(json);
+    Preconditions.checkArgument(StringUtils.isNotEmpty(json));
+
+    ObjectNode objectNode;
+    try {
+      objectNode = (ObjectNode) mapper.readTree(json);
+    } catch (IOException ex) {
+      ex.printStackTrace();
+      return null;
+    }
+
+    if (objectNode.findValue("kind") != null && objectNode.get("kind").toString().equals(ACTIVITY_IDENTIFIER)) {
+      return Activity.class;
+    } else if (objectNode.findValue("kind") != null && objectNode.get("kind").toString().equals(PERSON_IDENTIFIER)) {
+      return Person.class;
+    } else  {
+      return ObjectNode.class;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusPersonDeserializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusPersonDeserializer.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusPersonDeserializer.java
index e562d4f..f70335b 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusPersonDeserializer.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GPlusPersonDeserializer.java
@@ -19,6 +19,8 @@
 
 package com.google.gplus.serializer.util;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.DeserializationContext;
@@ -28,7 +30,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.IntNode;
 import com.google.api.client.util.Lists;
 import com.google.api.services.plus.model.Person;
-import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -36,74 +38,80 @@ import java.io.IOException;
 import java.util.List;
 
 /**
- * Custom deserializer for GooglePlus' Person model
+ * Custom deserializer for GooglePlus' Person model.
  */
 public class GPlusPersonDeserializer extends JsonDeserializer<Person> {
-    private final static Logger LOGGER = LoggerFactory.getLogger(GPlusPersonDeserializer.class);
-
-    /**
-     * Because the GooglePlus Person object contains complex objects within its hierarchy, we have to use
-     * a custom deserializer
-     *
-     * @param jsonParser
-     * @param deserializationContext
-     * @return The deserialized {@link com.google.api.services.plus.model.Person} object
-     * @throws IOException
-     * @throws JsonProcessingException
-     */
-    @Override
-    public Person deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
-        ObjectMapper m = StreamsJacksonMapper.getInstance();
-
-        JsonNode node = jsonParser.getCodec().readTree(jsonParser);
-        Person person = new Person();
-        try {
-
-            person.setCircledByCount((Integer) ((IntNode) node.get("circledByCount")).numberValue());
-            person.setDisplayName(node.get("displayName").asText());
-            person.setEtag(node.get("etag").asText());
-            person.setGender(node.get("gender").asText());
-            person.setId(node.get("id").asText());
-
-            Person.Image image = new Person.Image();
-            JsonNode imageNode = node.get("image");
-            image.setIsDefault(imageNode.get("isDefault").asBoolean());
-            image.setUrl(imageNode.get("url").asText());
-            person.setImage(image);
-
-            person.setIsPlusUser(node.get("isPlusUser").asBoolean());
-            person.setKind(node.get("kind").asText());
-
-            JsonNode nameNode = node.get("name");
-            Person.Name name = m.readValue(m.writeValueAsString(nameNode), Person.Name.class);
-            person.setName(name);
-
-            person.setObjectType(node.get("objectType").asText());
-
-            List<Person.Organizations> organizations = Lists.newArrayList();
-            for (JsonNode orgNode : node.get("organizations")) {
-                Person.Organizations org = m.readValue(m.writeValueAsString(orgNode), Person.Organizations.class);
-                organizations.add(org);
-            }
-            person.setOrganizations(organizations);
-
-            person.setUrl(node.get("url").asText());
-            person.setVerified(node.get("verified").asBoolean());
-
-            List<Person.Emails> emails = Lists.newArrayList();
-            if( node.has("emails")) {
-                for (JsonNode emailNode : node.get("emails")) {
-                    Person.Emails email = m.readValue(m.writeValueAsString(emailNode), Person.Emails.class);
-                    emails.add(email);
-                }
-            }
-
-            if( node.has("tagline")) person.setTagline(node.get("tagline").asText());
-            if( node.has("aboutMe")) person.setAboutMe(node.get("aboutMe").asText());
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to deserialize a Person object: {}", e);
-        }
 
-        return person;
+  private static final Logger LOGGER = LoggerFactory.getLogger(GPlusPersonDeserializer.class);
+
+  /**
+   * Because the GooglePlus Person object contains complex objects within its hierarchy, we have to use
+   * a custom deserializer
+   *
+   * @param jsonParser jsonParser
+   * @param deserializationContext deserializationContext
+   * @return The deserialized {@link com.google.api.services.plus.model.Person} object
+   * @throws IOException IOException
+   * @throws JsonProcessingException JsonProcessingException
+   */
+  @Override
+  public Person deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+    JsonNode node = jsonParser.getCodec().readTree(jsonParser);
+    Person person = new Person();
+    try {
+
+      person.setCircledByCount((Integer) ((IntNode) node.get("circledByCount")).numberValue());
+      person.setDisplayName(node.get("displayName").asText());
+      person.setEtag(node.get("etag").asText());
+      person.setGender(node.get("gender").asText());
+      person.setId(node.get("id").asText());
+
+      Person.Image image = new Person.Image();
+      JsonNode imageNode = node.get("image");
+      image.setIsDefault(imageNode.get("isDefault").asBoolean());
+      image.setUrl(imageNode.get("url").asText());
+      person.setImage(image);
+
+      person.setIsPlusUser(node.get("isPlusUser").asBoolean());
+      person.setKind(node.get("kind").asText());
+
+      JsonNode nameNode = node.get("name");
+      Person.Name name = mapper.readValue(mapper.writeValueAsString(nameNode), Person.Name.class);
+      person.setName(name);
+
+      person.setObjectType(node.get("objectType").asText());
+
+      List<Person.Organizations> organizations = Lists.newArrayList();
+      for (JsonNode orgNode : node.get("organizations")) {
+        Person.Organizations org = mapper.readValue(mapper.writeValueAsString(orgNode), Person.Organizations.class);
+        organizations.add(org);
+      }
+      person.setOrganizations(organizations);
+
+      person.setUrl(node.get("url").asText());
+      person.setVerified(node.get("verified").asBoolean());
+
+      List<Person.Emails> emails = Lists.newArrayList();
+
+      if ( node.has("emails")) {
+        for (JsonNode emailNode : node.get("emails")) {
+          Person.Emails email = mapper.readValue(mapper.writeValueAsString(emailNode), Person.Emails.class);
+          emails.add(email);
+        }
+      }
+
+      if ( node.has("tagline")) {
+        person.setTagline(node.get("tagline").asText());
+      }
+      if ( node.has("aboutMe")) {
+        person.setAboutMe(node.get("aboutMe").asText());
+      }
+    } catch (Exception ex) {
+      LOGGER.error("Exception while trying to deserialize a Person object: {}", ex);
     }
+
+    return person;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GooglePlusActivityUtil.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GooglePlusActivityUtil.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GooglePlusActivityUtil.java
index cdc7e8f..1293d18 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GooglePlusActivityUtil.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/serializer/util/GooglePlusActivityUtil.java
@@ -19,17 +19,19 @@
 
 package com.google.gplus.serializer.util;
 
-import com.google.api.services.plus.model.Comment;
-import com.google.api.services.plus.model.Person;
-import com.google.common.base.Joiner;
-import com.google.common.base.Optional;
-import com.google.common.collect.Lists;
 import org.apache.streams.exceptions.ActivitySerializerException;
 import org.apache.streams.pojo.extensions.ExtensionUtil;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Image;
 import org.apache.streams.pojo.json.Provider;
+
+import com.google.api.services.plus.model.Comment;
+import com.google.api.services.plus.model.Person;
+import com.google.common.base.Joiner;
+import com.google.common.base.Optional;
+import com.google.common.collect.Lists;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -39,258 +41,263 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+/**
+ * GooglePlusActivityUtil helps convert c.g.Person and c.g.Activity into o.a.s.p.j.o.Page and o.a.s.p.j.Activity.
+ */
 public class GooglePlusActivityUtil {
-    private static final Logger LOGGER = LoggerFactory.getLogger(GooglePlusActivityUtil.class);
-
-    /**
-     * Given a {@link com.google.api.services.plus.model.Person} object and an
-     * {@link org.apache.streams.pojo.json.Activity} object, fill out the appropriate details
-     *
-     * @param item
-     * @param activity
-     * @throws ActivitySerializerException
-     */
-    public static void updateActivity(Person item, Activity activity) throws ActivitySerializerException {
-        activity.setActor(buildActor(item));
-        activity.setVerb("update");
-
-        activity.setId(formatId(activity.getVerb(),
-                Optional.fromNullable(
-                        item.getId())
-                        .orNull()));
-
-        activity.setProvider(getProvider());
-    }
-
-    /**
-     * Given a {@link List} of {@link com.google.api.services.plus.model.Comment} objects and an
-     * {@link org.apache.streams.pojo.json.Activity}, update that Activity to contain all comments
-     *
-     * @param comments
-     * @param activity
-     */
-    public static void updateActivity(List<Comment> comments, Activity activity) {
-        for(Comment comment : comments) {
-            addComment(activity, comment);
-        }
 
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
-        extensions.put("comment_count", comments.size());
+  private static final Logger LOGGER = LoggerFactory.getLogger(GooglePlusActivityUtil.class);
+
+  /**
+   * Given a {@link com.google.api.services.plus.model.Person} object and an
+   * {@link org.apache.streams.pojo.json.Activity} object, fill out the appropriate details.
+   *
+   * @param item Person
+   * @param activity Activity
+   * @throws ActivitySerializerException ActivitySerializerException
+   */
+  public static void updateActivity(Person item, Activity activity) throws ActivitySerializerException {
+    activity.setActor(buildActor(item));
+    activity.setVerb("update");
+
+    activity.setId(formatId(activity.getVerb(),
+        Optional.fromNullable(
+            item.getId())
+            .orNull()));
+
+    activity.setProvider(getProvider());
+  }
+
+  /**
+   * Given a {@link List} of {@link com.google.api.services.plus.model.Comment} objects and an
+   * {@link org.apache.streams.pojo.json.Activity}, update that Activity to contain all comments
+   *
+   * @param comments input List of Comment
+   * @param activity output Activity
+   */
+  public static void updateActivity(List<Comment> comments, Activity activity) {
+    for (Comment comment : comments) {
+      addComment(activity, comment);
     }
 
-    /**
-     * Given a Google Plus {@link com.google.api.services.plus.model.Activity},
-     * convert that into an Activity streams formatted {@link org.apache.streams.pojo.json.Activity}
-     *
-     * @param gPlusActivity
-     * @param activity
-     */
-    public static void updateActivity(com.google.api.services.plus.model.Activity gPlusActivity, Activity activity) {
-        activity.setActor(buildActor(gPlusActivity.getActor()));
-        activity.setVerb("post");
-        activity.setTitle(gPlusActivity.getTitle());
-        activity.setUrl(gPlusActivity.getUrl());
-        activity.setProvider(getProvider());
-
-        if(gPlusActivity.getObject() != null) {
-            activity.setContent(gPlusActivity.getObject().getContent());
-        }
-
-        activity.setId(formatId(activity.getVerb(),
-                Optional.fromNullable(
-                        gPlusActivity.getId())
-                        .orNull()));
-
-        DateTime published = new DateTime(String.valueOf(gPlusActivity.getPublished()));
-        activity.setPublished(published);
-
-        setObject(activity, gPlusActivity.getObject());
-        addGPlusExtensions(activity, gPlusActivity);
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
+    extensions.put("comment_count", comments.size());
+  }
+
+  /**
+   * Given a Google Plus {@link com.google.api.services.plus.model.Activity},
+   * convert that into an Activity streams formatted {@link org.apache.streams.pojo.json.Activity}
+   *
+   * @param gPlusActivity input c.g.a.s.p.m.Activity
+   * @param activity output o.a.s.p.j.Activity
+   */
+  public static void updateActivity(com.google.api.services.plus.model.Activity gPlusActivity, Activity activity) {
+    activity.setActor(buildActor(gPlusActivity.getActor()));
+    activity.setVerb("post");
+    activity.setTitle(gPlusActivity.getTitle());
+    activity.setUrl(gPlusActivity.getUrl());
+    activity.setProvider(getProvider());
+
+    if (gPlusActivity.getObject() != null) {
+      activity.setContent(gPlusActivity.getObject().getContent());
     }
 
-    /**
-     * Adds a single {@link com.google.api.services.plus.model.Comment} to the Object.Attachments
-     * section of the passed in {@link org.apache.streams.pojo.json.Activity}
-     *
-     * @param activity
-     * @param comment
-     */
-    private static void addComment(Activity activity, Comment comment) {
-        ActivityObject obj = new ActivityObject();
-
-        obj.setId(comment.getId());
-        obj.setPublished(new DateTime(String.valueOf(comment.getPublished())));
-        obj.setUpdated(new DateTime(String.valueOf(comment.getUpdated())));
-        obj.setContent(comment.getObject().getContent());
-        obj.setObjectType(comment.getObject().getObjectType());
-
-        Map<String, Object> extensions = new HashMap<>();
-        extensions.put("googlePlus", comment);
-
-        obj.setAdditionalProperty("extensions", extensions);
-
-        if(activity.getObject() == null) {
-            activity.setObject(new ActivityObject());
-        }
-        if(activity.getObject().getAttachments() == null) {
-            activity.getObject().setAttachments(new ArrayList<ActivityObject>());
-        }
-
-        activity.getObject().getAttachments().add(obj);
+    activity.setId(formatId(activity.getVerb(),
+        Optional.fromNullable(
+            gPlusActivity.getId())
+            .orNull()));
+
+    DateTime published = new DateTime(String.valueOf(gPlusActivity.getPublished()));
+    activity.setPublished(published);
+
+    setObject(activity, gPlusActivity.getObject());
+    addGPlusExtensions(activity, gPlusActivity);
+  }
+
+  /**
+   * Adds a single {@link com.google.api.services.plus.model.Comment} to the Object.Attachments
+   * section of the passed in {@link org.apache.streams.pojo.json.Activity}
+   *
+   * @param activity output o.a.s.p.j.Activity
+   * @param comment input c.g.a.s.p.m.Comment
+   */
+  private static void addComment(Activity activity, Comment comment) {
+    ActivityObject obj = new ActivityObject();
+
+    obj.setId(comment.getId());
+    obj.setPublished(new DateTime(String.valueOf(comment.getPublished())));
+    obj.setUpdated(new DateTime(String.valueOf(comment.getUpdated())));
+    obj.setContent(comment.getObject().getContent());
+    obj.setObjectType(comment.getObject().getObjectType());
+
+    Map<String, Object> extensions = new HashMap<>();
+    extensions.put("googlePlus", comment);
+
+    obj.setAdditionalProperty("extensions", extensions);
+
+    if (activity.getObject() == null) {
+      activity.setObject(new ActivityObject());
     }
-
-    /**
-     * Add in necessary extensions from the passed in {@link com.google.api.services.plus.model.Activity} to the
-     * {@link org.apache.streams.pojo.json.Activity} object
-     *
-     * @param activity
-     * @param gPlusActivity
-     */
-    private static void addGPlusExtensions(Activity activity, com.google.api.services.plus.model.Activity gPlusActivity) {
-
-        activity.getAdditionalProperties().put("googlePlus", gPlusActivity);
-
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
-
-        com.google.api.services.plus.model.Activity.PlusObject object = gPlusActivity.getObject();
-
-        if(object != null) {
-            com.google.api.services.plus.model.Activity.PlusObject.Plusoners plusoners = object.getPlusoners();
-            if(plusoners != null) {
-                Map<String, Object> likes = new HashMap<>();
-                likes.put("count", plusoners.getTotalItems());
-                extensions.put("likes", likes);
-            }
-
-            com.google.api.services.plus.model.Activity.PlusObject.Resharers resharers = object.getResharers();
-            if(resharers != null) {
-                Map<String, Object> rebroadcasts = new HashMap<>();
-                rebroadcasts.put("count", resharers.getTotalItems());
-                extensions.put("rebroadcasts", rebroadcasts);
-            }
-
-            extensions.put("keywords", object.getContent());
-        }
+    if (activity.getObject().getAttachments() == null) {
+      activity.getObject().setAttachments(new ArrayList<ActivityObject>());
     }
 
-    /**
-     * Set the {@link org.apache.streams.pojo.json.ActivityObject} field given the passed in
-     * {@link com.google.api.services.plus.model.Activity.PlusObject}
-     *
-     * @param activity
-     * @param object
-     */
-    private static void setObject(Activity activity, com.google.api.services.plus.model.Activity.PlusObject object) {
-        if(object != null) {
-            ActivityObject activityObject = new ActivityObject();
+    activity.getObject().getAttachments().add(obj);
+  }
 
-            activityObject.setContent(object.getContent());
-            activityObject.setObjectType(object.getObjectType());
+  /**
+   * Add in necessary extensions from the passed in {@link com.google.api.services.plus.model.Activity} to the
+   * {@link org.apache.streams.pojo.json.Activity} object
+   *
+   * @param activity output o.a.s.p.j.Activity
+   * @param gPlusActivity input c.g.a.s.p.m.Activity
+   */
+  private static void addGPlusExtensions(Activity activity, com.google.api.services.plus.model.Activity gPlusActivity) {
 
-            java.util.List<ActivityObject> attachmentsList = new ArrayList<>();
-            for (com.google.api.services.plus.model.Activity.PlusObject.Attachments attachments : object.getAttachments()) {
-                ActivityObject attach = new ActivityObject();
+    activity.getAdditionalProperties().put("googlePlus", gPlusActivity);
 
-                attach.setContent(attachments.getContent());
-                attach.setDisplayName(attachments.getDisplayName());
-                attach.setObjectType(attachments.getObjectType());
-                attach.setUrl(attachments.getUrl());
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
 
-                Image image = new Image();
-                com.google.api.services.plus.model.Activity.PlusObject.Attachments.Image image1 = attachments.getImage();
+    com.google.api.services.plus.model.Activity.PlusObject object = gPlusActivity.getObject();
 
-                if (image1 != null) {
-                    image.setUrl(image1.getUrl());
-                    attach.setImage(image);
-                }
+    if (object != null) {
+      com.google.api.services.plus.model.Activity.PlusObject.Plusoners plusoners = object.getPlusoners();
+      if (plusoners != null) {
+        Map<String, Object> likes = new HashMap<>();
+        likes.put("count", plusoners.getTotalItems());
+        extensions.put("likes", likes);
+      }
 
-                attachmentsList.add(attach);
-            }
+      com.google.api.services.plus.model.Activity.PlusObject.Resharers resharers = object.getResharers();
+      if (resharers != null) {
+        Map<String, Object> rebroadcasts = new HashMap<>();
+        rebroadcasts.put("count", resharers.getTotalItems());
+        extensions.put("rebroadcasts", rebroadcasts);
+      }
 
-            activityObject.setAttachments(attachmentsList);
-
-            activity.setObject(activityObject);
-        }
+      extensions.put("keywords", object.getContent());
     }
-
-    /**
-     * Given a {@link com.google.api.services.plus.model.Activity.Actor} object, return a fully fleshed
-     * out {@link org.apache.streams.pojo.json.ActivityObject} actor
-     *
-     * @param gPlusActor
-     * @return {@link ActivityObject}
-     */
-    private static ActivityObject buildActor(com.google.api.services.plus.model.Activity.Actor gPlusActor) {
-        ActivityObject actor = new ActivityObject();
-
-        actor.setDisplayName(gPlusActor.getDisplayName());
-        actor.setId(formatId(String.valueOf(gPlusActor.getId())));
-        actor.setUrl(gPlusActor.getUrl());
+  }
+
+  /**
+   * Set the {@link org.apache.streams.pojo.json.ActivityObject} field given the passed in
+   * {@link com.google.api.services.plus.model.Activity.PlusObject}
+   *
+   * @param activity output $.object as o.a.s.p.j.ActivityObject
+   * @param plusObject input c.g.a.s.p.m.Activity.PlusObject
+   */
+  private static void setObject(Activity activity, com.google.api.services.plus.model.Activity.PlusObject plusObject) {
+    if (plusObject != null) {
+      ActivityObject activityObject = new ActivityObject();
+
+      activityObject.setContent(plusObject.getContent());
+      activityObject.setObjectType(plusObject.getObjectType());
+
+      java.util.List<ActivityObject> attachmentsList = new ArrayList<>();
+      for (com.google.api.services.plus.model.Activity.PlusObject.Attachments attachments : plusObject.getAttachments()) {
+        ActivityObject attach = new ActivityObject();
+
+        attach.setContent(attachments.getContent());
+        attach.setDisplayName(attachments.getDisplayName());
+        attach.setObjectType(attachments.getObjectType());
+        attach.setUrl(attachments.getUrl());
 
         Image image = new Image();
-        com.google.api.services.plus.model.Activity.Actor.Image googlePlusImage = gPlusActor.getImage();
+        com.google.api.services.plus.model.Activity.PlusObject.Attachments.Image image1 = attachments.getImage();
 
-        if(googlePlusImage != null) {
-            image.setUrl(googlePlusImage.getUrl());
+        if (image1 != null) {
+          image.setUrl(image1.getUrl());
+          attach.setImage(image);
         }
-        actor.setImage(image);
 
-        return actor;
-    }
-    /**
-     * Extract the relevant details from the passed in {@link com.google.api.services.plus.model.Person} object and build
-     * an actor with them
-     *
-     * @param person
-     * @return Actor constructed with relevant Person details
-     */
-    private static ActivityObject buildActor(Person person) {
-        ActivityObject actor = new ActivityObject();
-
-        actor.setUrl(person.getUrl());
-        actor.setDisplayName(person.getDisplayName());
-        actor.setId(formatId(String.valueOf(person.getId())));
-
-        if(person.getAboutMe() != null) {
-            actor.setSummary(person.getAboutMe());
-        } else if(person.getTagline() != null) {
-            actor.setSummary(person.getTagline());
-        }
-
-        Image image = new Image();
-        Person.Image googlePlusImage = person.getImage();
-
-        if(googlePlusImage != null) {
-            image.setUrl(googlePlusImage.getUrl());
-        }
-        actor.setImage(image);
+        attachmentsList.add(attach);
+      }
 
-        Map<String, Object> extensions = new HashMap<>();
+      activityObject.setAttachments(attachmentsList);
 
-        extensions.put("followers", person.getCircledByCount());
-        extensions.put("googleplus", person);
-        actor.setAdditionalProperty("extensions", extensions);
-
-        return actor;
+      activity.setObject(activityObject);
     }
-
-    /**
-     * Gets the common googleplus {@link org.apache.streams.pojo.json.Provider} object
-     * @return a provider object representing GooglePlus
-     */
-    public static Provider getProvider() {
-        Provider provider = new Provider();
-        provider.setId("id:providers:googleplus");
-        provider.setDisplayName("GooglePlus");
-        return provider;
+  }
+
+  /**
+   * Given a {@link com.google.api.services.plus.model.Activity.Actor} object, return a fully fleshed
+   * out {@link org.apache.streams.pojo.json.ActivityObject} actor
+   *
+   * @param gPlusActor input c.g.a.s.p.m.Activity.Actor
+   * @return {@link ActivityObject} output $.actor as o.a.s.p.j.ActivityObject
+   */
+  private static ActivityObject buildActor(com.google.api.services.plus.model.Activity.Actor gPlusActor) {
+    ActivityObject actor = new ActivityObject();
+
+    actor.setDisplayName(gPlusActor.getDisplayName());
+    actor.setId(formatId(String.valueOf(gPlusActor.getId())));
+    actor.setUrl(gPlusActor.getUrl());
+
+    Image image = new Image();
+    com.google.api.services.plus.model.Activity.Actor.Image googlePlusImage = gPlusActor.getImage();
+
+    if (googlePlusImage != null) {
+      image.setUrl(googlePlusImage.getUrl());
     }
+    actor.setImage(image);
+
+    return actor;
+  }
+
+  /**
+   * Extract the relevant details from the passed in {@link com.google.api.services.plus.model.Person} object and build
+   * an actor with them
+   *
+   * @param person Person
+   * @return Actor constructed with relevant Person details
+   */
+  private static ActivityObject buildActor(Person person) {
+    ActivityObject actor = new ActivityObject();
+
+    actor.setUrl(person.getUrl());
+    actor.setDisplayName(person.getDisplayName());
+    actor.setId(formatId(String.valueOf(person.getId())));
+
+    if (person.getAboutMe() != null) {
+      actor.setSummary(person.getAboutMe());
+    } else if (person.getTagline() != null) {
+      actor.setSummary(person.getTagline());
+    }
+
+    Image image = new Image();
+    Person.Image googlePlusImage = person.getImage();
 
-    /**
-     * Formats the ID to conform with the Apache Streams activity ID convention
-     * @param idparts the parts of the ID to join
-     * @return a valid Activity ID in format "id:googleplus:part1:part2:...partN"
-     */
-    public static String formatId(String... idparts) {
-        return Joiner.on(":").join(Lists.asList("id:googleplus", idparts));
+    if (googlePlusImage != null) {
+      image.setUrl(googlePlusImage.getUrl());
     }
+    actor.setImage(image);
+
+    Map<String, Object> extensions = new HashMap<>();
+
+    extensions.put("followers", person.getCircledByCount());
+    extensions.put("googleplus", person);
+    actor.setAdditionalProperty("extensions", extensions);
+
+    return actor;
+  }
+
+  /**
+   * Gets the common googleplus {@link org.apache.streams.pojo.json.Provider} object
+   * @return a provider object representing GooglePlus
+   */
+  public static Provider getProvider() {
+    Provider provider = new Provider();
+    provider.setId("id:providers:googleplus");
+    provider.setDisplayName("GooglePlus");
+    return provider;
+  }
+
+  /**
+   * Formats the ID to conform with the Apache Streams activity ID convention
+   * @param idparts the parts of the ID to join
+   * @return a valid Activity ID in format "id:googleplus:part1:part2:...partN"
+   */
+  public static String formatId(String... idparts) {
+    return Joiner.on(":").join(Lists.asList("id:googleplus", idparts));
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/GooglePlusCommentSerDeIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/GooglePlusCommentSerDeIT.java b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/GooglePlusCommentSerDeIT.java
index df15251..28b4db8 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/GooglePlusCommentSerDeIT.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/GooglePlusCommentSerDeIT.java
@@ -19,6 +19,9 @@
 
 package com.google.gplus;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.pojo.json.Activity;
+
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.module.SimpleModule;
@@ -26,10 +29,9 @@ import com.google.api.client.util.Lists;
 import com.google.api.services.plus.model.Comment;
 import com.google.gplus.serializer.util.GPlusCommentDeserializer;
 import com.google.gplus.serializer.util.GooglePlusActivityUtil;
+
 import org.apache.commons.lang.StringUtils;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
-import org.junit.*;
+import org.junit.Before;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -44,74 +46,77 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 
 /**
- * Tests conversion of gplus inputs to Activity
+ * Tests conversion of gplus inputs to Activity.
  */
 public class GooglePlusCommentSerDeIT {
-    private final static Logger LOGGER = LoggerFactory.getLogger(GooglePlusCommentSerDeIT.class);
-    private ObjectMapper objectMapper;
-    private GooglePlusActivityUtil googlePlusActivityUtil;
-
-    @Before
-    public void setup() {
-        objectMapper = StreamsJacksonMapper.getInstance();
-        SimpleModule simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(Comment.class, new GPlusCommentDeserializer());
-        objectMapper.registerModule(simpleModule);
-        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-
-        googlePlusActivityUtil = new GooglePlusActivityUtil();
-    }
-
-    @org.junit.Test
-    public void testCommentObjects() {
-        InputStream is = GooglePlusCommentSerDeIT.class.getResourceAsStream("/google_plus_comments_jsons.txt");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
-
-        Activity activity = new Activity();
-        List<Comment> comments = Lists.newArrayList();
-
-        try {
-            while (br.ready()) {
-                String line = br.readLine();
-                if (!StringUtils.isEmpty(line)) {
-                    LOGGER.info("raw: {}", line);
-                    Comment comment = objectMapper.readValue(line, Comment.class);
-
-                    LOGGER.info("comment: {}", comment);
-
-                    assertNotNull(comment);
-                    assertNotNull(comment.getEtag());
-                    assertNotNull(comment.getId());
-                    assertNotNull(comment.getInReplyTo());
-                    assertNotNull(comment.getObject());
-                    assertNotNull(comment.getPlusoners());
-                    assertNotNull(comment.getPublished());
-                    assertNotNull(comment.getUpdated());
-                    assertNotNull(comment.getSelfLink());
-                    assertEquals(comment.getVerb(), "post");
-
-                    comments.add(comment);
-                }
-            }
-
-            assertEquals(comments.size(), 3);
-
-            googlePlusActivityUtil.updateActivity(comments, activity);
-            assertNotNull(activity);
-            assertNotNull(activity.getObject());
-            assertEquals(activity.getObject().getAttachments().size(), 3);
-        } catch (Exception e) {
-            LOGGER.error("Exception while testing serializability: {}", e);
+  private static final Logger LOGGER = LoggerFactory.getLogger(GooglePlusCommentSerDeIT.class);
+  private ObjectMapper objectMapper;
+  private GooglePlusActivityUtil googlePlusActivityUtil;
+
+  /**
+   * setup.
+   */
+  @Before
+  public void setup() {
+    objectMapper = StreamsJacksonMapper.getInstance();
+    SimpleModule simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(Comment.class, new GPlusCommentDeserializer());
+    objectMapper.registerModule(simpleModule);
+    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+
+    googlePlusActivityUtil = new GooglePlusActivityUtil();
+  }
+
+  @org.junit.Test
+  public void testCommentObjects() {
+    InputStream is = GooglePlusCommentSerDeIT.class.getResourceAsStream("/google_plus_comments_jsons.txt");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
+
+    Activity activity = new Activity();
+    List<Comment> comments = Lists.newArrayList();
+
+    try {
+      while (br.ready()) {
+        String line = br.readLine();
+        if (!StringUtils.isEmpty(line)) {
+          LOGGER.info("raw: {}", line);
+          Comment comment = objectMapper.readValue(line, Comment.class);
+
+          LOGGER.info("comment: {}", comment);
+
+          assertNotNull(comment);
+          assertNotNull(comment.getEtag());
+          assertNotNull(comment.getId());
+          assertNotNull(comment.getInReplyTo());
+          assertNotNull(comment.getObject());
+          assertNotNull(comment.getPlusoners());
+          assertNotNull(comment.getPublished());
+          assertNotNull(comment.getUpdated());
+          assertNotNull(comment.getSelfLink());
+          assertEquals(comment.getVerb(), "post");
+
+          comments.add(comment);
         }
+      }
+
+      assertEquals(comments.size(), 3);
+
+      googlePlusActivityUtil.updateActivity(comments, activity);
+      assertNotNull(activity);
+      assertNotNull(activity.getObject());
+      assertEquals(activity.getObject().getAttachments().size(), 3);
+    } catch (Exception ex) {
+      LOGGER.error("Exception while testing serializability: {}", ex);
     }
+  }
 
-    @org.junit.Test
-    public void testEmptyComments() {
-        Activity activity = new Activity();
+  @org.junit.Test
+  public void testEmptyComments() {
+    Activity activity = new Activity();
 
-        googlePlusActivityUtil.updateActivity(new ArrayList<Comment>(), activity);
+    googlePlusActivityUtil.updateActivity(new ArrayList<Comment>(), activity);
 
-        assertNull(activity.getObject());
-    }
+    assertNull(activity.getObject());
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/GooglePlusPersonSerDeIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/GooglePlusPersonSerDeIT.java b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/GooglePlusPersonSerDeIT.java
index 0b101e9..be54aa1 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/GooglePlusPersonSerDeIT.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/GooglePlusPersonSerDeIT.java
@@ -19,18 +19,20 @@
 
 package com.google.gplus;
 
-import com.fasterxml.jackson.databind.*;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.pojo.json.Activity;
+import org.apache.streams.pojo.json.ActivityObject;
+import org.apache.streams.pojo.json.Provider;
+
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import com.google.api.services.plus.model.Person;
 import com.google.gplus.serializer.util.GPlusPersonDeserializer;
 import com.google.gplus.serializer.util.GooglePlusActivityUtil;
+
 import org.apache.commons.lang.StringUtils;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.pojo.json.ActivityObject;
-import org.apache.streams.pojo.json.Provider;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -43,59 +45,62 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
 /**
- * Tests conversion of gplus inputs to Activity
+ * Tests conversion of gplus inputs to Activity.
  */
 public class GooglePlusPersonSerDeIT {
-    private final static Logger LOGGER = LoggerFactory.getLogger(GooglePlusPersonSerDeIT.class);
-    private ObjectMapper objectMapper;
-    private GooglePlusActivityUtil googlePlusActivityUtil;
-
-    @Before
-    public void setup() {
-        objectMapper = StreamsJacksonMapper.getInstance();
-        SimpleModule simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(Person.class, new GPlusPersonDeserializer());
-        objectMapper.registerModule(simpleModule);
-        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-
-        googlePlusActivityUtil = new GooglePlusActivityUtil();
-    }
+  private static final Logger LOGGER = LoggerFactory.getLogger(GooglePlusPersonSerDeIT.class);
+  private ObjectMapper objectMapper;
+  private GooglePlusActivityUtil googlePlusActivityUtil;
+
+  /**
+   * setup.
+   */
+  @Before
+  public void setup() {
+    objectMapper = StreamsJacksonMapper.getInstance();
+    SimpleModule simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(Person.class, new GPlusPersonDeserializer());
+    objectMapper.registerModule(simpleModule);
+    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+
+    googlePlusActivityUtil = new GooglePlusActivityUtil();
+  }
+
+  @Test
+  public void testPersonObjects() {
+    InputStream is = GooglePlusPersonSerDeIT.class.getResourceAsStream("/google_plus_person_jsons.txt");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
+
+    try {
+      while (br.ready()) {
+        String line = br.readLine();
+        if (!StringUtils.isEmpty(line)) {
+          LOGGER.info("raw: {}", line);
+          Activity activity = new Activity();
+
+          Person person = objectMapper.readValue(line, Person.class);
+
+          googlePlusActivityUtil.updateActivity(person, activity);
+          LOGGER.info("activity: {}", activity);
+
+          assertNotNull(activity);
+          assert (activity.getId().contains("id:googleplus:update"));
+          assertEquals(activity.getVerb(), "update");
+
+          Provider provider = activity.getProvider();
+          assertEquals(provider.getId(), "id:providers:googleplus");
+          assertEquals(provider.getDisplayName(), "GooglePlus");
+
+          ActivityObject actor = activity.getActor();
+          assertNotNull(actor.getImage());
+          assert (actor.getId().contains("id:googleplus:"));
+          assertNotNull(actor.getUrl());
 
-    @Test
-    public void TestPersonObjects() {
-        InputStream is = GooglePlusPersonSerDeIT.class.getResourceAsStream("/google_plus_person_jsons.txt");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
-
-        try {
-            while (br.ready()) {
-                String line = br.readLine();
-                if (!StringUtils.isEmpty(line)) {
-                    LOGGER.info("raw: {}", line);
-                    Activity activity = new Activity();
-
-                    Person person = objectMapper.readValue(line, Person.class);
-
-                    googlePlusActivityUtil.updateActivity(person, activity);
-                    LOGGER.info("activity: {}", activity);
-
-                    assertNotNull(activity);
-                    assert(activity.getId().contains("id:googleplus:update"));
-                    assertEquals(activity.getVerb(), "update");
-
-                    Provider provider = activity.getProvider();
-                    assertEquals(provider.getId(), "id:providers:googleplus");
-                    assertEquals(provider.getDisplayName(), "GooglePlus");
-
-                    ActivityObject actor = activity.getActor();
-                    assertNotNull(actor.getImage());
-                    assert(actor.getId().contains("id:googleplus:"));
-                    assertNotNull(actor.getUrl());
-
-                }
-            }
-        } catch (Exception e) {
-            LOGGER.error("Exception while testing serializability: {}", e);
         }
+      }
+    } catch (Exception ex) {
+      LOGGER.error("Exception while testing serializability: {}", ex);
     }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/processor/GooglePlusActivitySerDeIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/processor/GooglePlusActivitySerDeIT.java b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/processor/GooglePlusActivitySerDeIT.java
index 8ffec0b..d86001c 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/processor/GooglePlusActivitySerDeIT.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/processor/GooglePlusActivitySerDeIT.java
@@ -19,17 +19,19 @@
 
 package com.google.gplus.processor;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.pojo.extensions.ExtensionUtil;
+import org.apache.streams.pojo.json.Activity;
+import org.apache.streams.pojo.json.ActivityObject;
+import org.apache.streams.pojo.json.Provider;
+
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import com.google.gplus.serializer.util.GPlusActivityDeserializer;
 import com.google.gplus.serializer.util.GooglePlusActivityUtil;
+
 import org.apache.commons.lang.StringUtils;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.extensions.ExtensionUtil;
-import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.pojo.json.ActivityObject;
-import org.apache.streams.pojo.json.Provider;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -44,71 +46,75 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
 /**
- * Tests conversion of gplus inputs to Activity
+ * Tests conversion of gplus inputs to Activity.
  */
 public class GooglePlusActivitySerDeIT {
-    private final static Logger LOGGER = LoggerFactory.getLogger(GooglePlusActivitySerDeIT.class);
-    private ObjectMapper objectMapper;
-
-    @Before
-    public void setup() {
-        objectMapper = StreamsJacksonMapper.getInstance();
-        SimpleModule simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(com.google.api.services.plus.model.Activity.class, new GPlusActivityDeserializer());
-        objectMapper.registerModule(simpleModule);
-        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-    }
-
-    @Test
-    @SuppressWarnings("unchecked")
-    public void TestActivityObjects() {
-        InputStream is = GooglePlusActivitySerDeIT.class.getResourceAsStream("/google_plus_activity_jsons.txt");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
-
-        try {
-            while (br.ready()) {
-                String line = br.readLine();
-                if (!StringUtils.isEmpty(line)) {
-                    LOGGER.info("raw: {}", line);
-                    Activity activity = new Activity();
-
-                    com.google.api.services.plus.model.Activity gPlusActivity = objectMapper.readValue(line, com.google.api.services.plus.model.Activity.class);
-
-                    GooglePlusActivityUtil.updateActivity(gPlusActivity, activity);
-                    LOGGER.info("activity: {}", activity);
-
-                    assertNotNull(activity);
-                    assert(activity.getId().contains("id:googleplus:post"));
-                    assertEquals(activity.getVerb(), "post");
-
-                    Provider provider = activity.getProvider();
-                    assertEquals(provider.getId(), "id:providers:googleplus");
-                    assertEquals(provider.getDisplayName(), "GooglePlus");
-
-                    ActivityObject actor = activity.getActor();
-                    assertNotNull(actor.getImage());
-                    assert(actor.getId().contains("id:googleplus:"));
-                    assertNotNull(actor.getUrl());
-
-                    assertNotNull(activity.getPublished());
-                    assertNotNull(activity.getTitle());
-                    assertNotNull(activity.getUrl());
-
-                    Map<String, Object> extensions = ExtensionUtil.getInstance().getExtensions(activity);
-                    assertNotNull(extensions);
-
-                    if(activity.getContent() != null) {
-                        assertNotNull(extensions.get("rebroadcasts"));
-                        assertNotNull(extensions.get("keywords"));
-                        assertNotNull(extensions.get("likes"));
-                        assert (((Map<String, Object>) extensions.get("rebroadcasts")).containsKey("count"));
-                        assert (((Map<String, Object>) extensions.get("likes")).containsKey("count"));
-                    }
-                }
-            }
-        } catch (Exception e) {
-            LOGGER.error("Exception while testing serializability: {}", e);
+  private static final Logger LOGGER = LoggerFactory.getLogger(GooglePlusActivitySerDeIT.class);
+  private ObjectMapper objectMapper;
+
+  /**
+   * setup.
+   */
+  @Before
+  public void setup() {
+    objectMapper = StreamsJacksonMapper.getInstance();
+    SimpleModule simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(com.google.api.services.plus.model.Activity.class, new GPlusActivityDeserializer());
+    objectMapper.registerModule(simpleModule);
+    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testActivityObjects() {
+    InputStream is = GooglePlusActivitySerDeIT.class.getResourceAsStream("/google_plus_activity_jsons.txt");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
+
+    try {
+      while (br.ready()) {
+        String line = br.readLine();
+        if (!StringUtils.isEmpty(line)) {
+          LOGGER.info("raw: {}", line);
+          Activity activity = new Activity();
+
+          com.google.api.services.plus.model.Activity googlePlusActivity =
+              objectMapper.readValue(line, com.google.api.services.plus.model.Activity.class);
+
+          GooglePlusActivityUtil.updateActivity(googlePlusActivity, activity);
+          LOGGER.info("activity: {}", activity);
+
+          assertNotNull(activity);
+          assert (activity.getId().contains("id:googleplus:post"));
+          assertEquals(activity.getVerb(), "post");
+
+          Provider provider = activity.getProvider();
+          assertEquals(provider.getId(), "id:providers:googleplus");
+          assertEquals(provider.getDisplayName(), "GooglePlus");
+
+          ActivityObject actor = activity.getActor();
+          assertNotNull(actor.getImage());
+          assert (actor.getId().contains("id:googleplus:"));
+          assertNotNull(actor.getUrl());
+
+          assertNotNull(activity.getPublished());
+          assertNotNull(activity.getTitle());
+          assertNotNull(activity.getUrl());
+
+          Map<String, Object> extensions = ExtensionUtil.getInstance().getExtensions(activity);
+          assertNotNull(extensions);
+
+          if (activity.getContent() != null) {
+            assertNotNull(extensions.get("rebroadcasts"));
+            assertNotNull(extensions.get("keywords"));
+            assertNotNull(extensions.get("likes"));
+            assert (((Map<String, Object>) extensions.get("rebroadcasts")).containsKey("count"));
+            assert (((Map<String, Object>) extensions.get("likes")).containsKey("count"));
+          }
         }
+      }
+    } catch (Exception ex) {
+      LOGGER.error("Exception while testing serializability: {}", ex);
     }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/processor/GooglePlusTypeConverterTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/processor/GooglePlusTypeConverterTest.java b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/processor/GooglePlusTypeConverterTest.java
index 59bb6bf..c27351c 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/processor/GooglePlusTypeConverterTest.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/processor/GooglePlusTypeConverterTest.java
@@ -18,6 +18,11 @@
 
 package com.google.gplus.processor;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.exceptions.ActivitySerializerException;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.pojo.json.Activity;
+
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.module.SimpleModule;
@@ -25,11 +30,8 @@ import com.google.api.services.plus.model.Person;
 import com.google.gplus.serializer.util.GPlusActivityDeserializer;
 import com.google.gplus.serializer.util.GPlusPersonDeserializer;
 import com.google.gplus.serializer.util.GooglePlusActivityUtil;
+
 import org.apache.commons.lang.StringUtils;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.exceptions.ActivitySerializerException;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -46,85 +48,89 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
 /**
- * Tests conversion of gplus inputs to Activity
+ * Tests conversion of gplus inputs to Activity.
  */
 @Ignore("ignore until test resources are available.")
 public class GooglePlusTypeConverterTest {
-    private final static Logger LOGGER = LoggerFactory.getLogger(GooglePlusTypeConverterTest.class);
-    private GooglePlusTypeConverter googlePlusTypeConverter;
-    private ObjectMapper objectMapper;
-
-    @Before
-    public void setup() {
-        objectMapper = StreamsJacksonMapper.getInstance();
-        SimpleModule simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(Person.class, new GPlusPersonDeserializer());
-        simpleModule.addDeserializer(com.google.api.services.plus.model.Activity.class, new GPlusActivityDeserializer());
-        objectMapper.registerModule(simpleModule);
-        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-
-        googlePlusTypeConverter = new GooglePlusTypeConverter();
-        googlePlusTypeConverter.prepare(null);
+  private static final Logger LOGGER = LoggerFactory.getLogger(GooglePlusTypeConverterTest.class);
+  private GooglePlusTypeConverter googlePlusTypeConverter;
+  private ObjectMapper objectMapper;
+
+  /**
+   * setup.
+   */
+  @Before
+  public void setup() {
+    objectMapper = StreamsJacksonMapper.getInstance();
+    SimpleModule simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(Person.class, new GPlusPersonDeserializer());
+    simpleModule.addDeserializer(com.google.api.services.plus.model.Activity.class, new GPlusActivityDeserializer());
+    objectMapper.registerModule(simpleModule);
+    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+
+    googlePlusTypeConverter = new GooglePlusTypeConverter();
+    googlePlusTypeConverter.prepare(null);
+  }
+
+  @Test
+  public void testProcessPerson() throws IOException, ActivitySerializerException {
+    InputStream is = GooglePlusTypeConverterTest.class.getResourceAsStream("/google_plus_person_jsons.txt");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
+
+    while (br.ready()) {
+      String line = br.readLine();
+      if (!StringUtils.isEmpty(line)) {
+        LOGGER.info("raw: {}", line);
+        Activity activity = new Activity();
+
+        Person person = objectMapper.readValue(line, Person.class);
+        StreamsDatum streamsDatum = new StreamsDatum(person);
+
+        assertNotNull(streamsDatum.getDocument());
+
+        List<StreamsDatum> retList = googlePlusTypeConverter.process(streamsDatum);
+        GooglePlusActivityUtil.updateActivity(person, activity);
+
+        assertEquals(retList.size(), 1);
+        assert (retList.get(0).getDocument() instanceof Activity);
+        assertEquals(activity, retList.get(0).getDocument());
+      }
     }
+  }
 
-    @Test
-    public void testProcessPerson() throws IOException, ActivitySerializerException {
-        InputStream is = GooglePlusTypeConverterTest.class.getResourceAsStream("/google_plus_person_jsons.txt");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
+  @Test
+  public void testProcessActivity() throws IOException, ActivitySerializerException {
+    InputStream is = GooglePlusTypeConverterTest.class.getResourceAsStream("/google_plus_activity_jsons.txt");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
 
-        while (br.ready()) {
-            String line = br.readLine();
-            if (!StringUtils.isEmpty(line)) {
-                LOGGER.info("raw: {}", line);
-                Activity activity = new Activity();
+    while (br.ready()) {
+      String line = br.readLine();
+      if (!StringUtils.isEmpty(line)) {
+        LOGGER.info("raw: {}", line);
+        Activity activity = new Activity();
 
-                Person person = objectMapper.readValue(line, Person.class);
-                StreamsDatum streamsDatum = new StreamsDatum(person);
+        com.google.api.services.plus.model.Activity gPlusActivity =
+            objectMapper.readValue(line, com.google.api.services.plus.model.Activity.class);
+        StreamsDatum streamsDatum = new StreamsDatum(gPlusActivity);
 
-                assertNotNull(streamsDatum.getDocument());
+        assertNotNull(streamsDatum.getDocument());
 
-                List<StreamsDatum> retList = googlePlusTypeConverter.process(streamsDatum);
-                GooglePlusActivityUtil.updateActivity(person, activity);
+        List<StreamsDatum> retList = googlePlusTypeConverter.process(streamsDatum);
+        GooglePlusActivityUtil.updateActivity(gPlusActivity, activity);
 
-                assertEquals(retList.size(), 1);
-                assert(retList.get(0).getDocument() instanceof Activity);
-                assertEquals(activity, retList.get(0).getDocument());
-            }
-        }
+        assertEquals(retList.size(), 1);
+        assert (retList.get(0).getDocument() instanceof Activity);
+        assertEquals(activity, retList.get(0).getDocument());
+      }
     }
+  }
 
-    @Test
-    public void testProcessActivity() throws IOException, ActivitySerializerException{
-        InputStream is = GooglePlusTypeConverterTest.class.getResourceAsStream("/google_plus_activity_jsons.txt");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
-
-        while (br.ready()) {
-            String line = br.readLine();
-            if (!StringUtils.isEmpty(line)) {
-                LOGGER.info("raw: {}", line);
-                Activity activity = new Activity();
-
-                com.google.api.services.plus.model.Activity gPlusActivity = objectMapper.readValue(line, com.google.api.services.plus.model.Activity.class);
-                StreamsDatum streamsDatum = new StreamsDatum(gPlusActivity);
-
-                assertNotNull(streamsDatum.getDocument());
-
-                List<StreamsDatum> retList = googlePlusTypeConverter.process(streamsDatum);
-                GooglePlusActivityUtil.updateActivity(gPlusActivity, activity);
+  @Test
+  public void testEmptyProcess() {
+    List<StreamsDatum> retList = googlePlusTypeConverter.process(null);
 
-                assertEquals(retList.size(), 1);
-                assert(retList.get(0).getDocument() instanceof Activity);
-                assertEquals(activity, retList.get(0).getDocument());
-            }
-        }
-    }
-
-    @Test
-    public void testEmptyProcess() {
-        List<StreamsDatum> retList = googlePlusTypeConverter.process(null);
-
-        assertEquals(retList.size(), 0);
-    }
+    assertEquals(retList.size(), 0);
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestAbstractGPlusProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestAbstractGPlusProvider.java b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestAbstractGPlusProvider.java
index f7b3dfb..4cae4c0 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestAbstractGPlusProvider.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestAbstractGPlusProvider.java
@@ -19,82 +19,83 @@
 
 package com.google.gplus.provider;
 
-import com.carrotsearch.randomizedtesting.RandomizedTest;
-import com.carrotsearch.randomizedtesting.annotations.Repeat;
-import com.google.api.services.plus.Plus;
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.google.gplus.GPlusConfiguration;
 import org.apache.streams.google.gplus.GPlusOAuthConfiguration;
 import org.apache.streams.google.gplus.configuration.UserInfo;
 import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+
+import com.carrotsearch.randomizedtesting.RandomizedTest;
+import com.carrotsearch.randomizedtesting.annotations.Repeat;
+import com.google.api.services.plus.Plus;
+import com.google.common.collect.Lists;
+
 import org.junit.Test;
 
 import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.BlockingQueue;
 
-import static org.junit.Assert.fail;
 import static org.mockito.Mockito.mock;
 
 /**
  * Unit tests for {@link com.google.gplus.provider.AbstractGPlusProvider}
  */
-public class TestAbstractGPlusProvider extends RandomizedTest{
+public class TestAbstractGPlusProvider extends RandomizedTest {
 
-    /**
-     * Test that every collector will be run and that data queued from the collectors will be processed.
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testDataCollectorRunsPerUser() {
-        int numUsers = randomIntBetween(1, 1000);
-        List<UserInfo> userList = Lists.newLinkedList();
-        for(int i=0; i < numUsers; ++i) {
-            userList.add(new UserInfo());
-        }
-        GPlusConfiguration config = new GPlusConfiguration();
-        GPlusOAuthConfiguration oauth = new GPlusOAuthConfiguration();
-        oauth.setAppName("a");
-        oauth.setPathToP12KeyFile("a");
-        oauth.setServiceAccountEmailAddress("a");
-        config.setOauth(oauth);
-        config.setGooglePlusUsers(userList);
-        AbstractGPlusProvider provider = new AbstractGPlusProvider(config) {
+  /**
+   * Test that every collector will be run and that data queued from the collectors will be processed.
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testDataCollectorRunsPerUser() {
+    int numUsers = randomIntBetween(1, 1000);
+    List<UserInfo> userList = Lists.newLinkedList();
+    for (int i = 0; i < numUsers; ++i) {
+      userList.add(new UserInfo());
+    }
+    GPlusConfiguration config = new GPlusConfiguration();
+    GPlusOAuthConfiguration oauth = new GPlusOAuthConfiguration();
+    oauth.setAppName("a");
+    oauth.setPathToP12KeyFile("a");
+    oauth.setServiceAccountEmailAddress("a");
+    config.setOauth(oauth);
+    config.setGooglePlusUsers(userList);
+    AbstractGPlusProvider provider = new AbstractGPlusProvider(config) {
 
-            @Override
-            protected Plus createPlusClient() throws IOException {
-                return mock(Plus.class);
-            }
+      @Override
+      protected Plus createPlusClient() throws IOException {
+        return mock(Plus.class);
+      }
 
-            @Override
-            protected Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, Plus plus, UserInfo userInfo) {
-                final BlockingQueue<StreamsDatum> q = queue;
-                return new Runnable() {
-                    @Override
-                    public void run() {
-                        try {
-                            q.put(new StreamsDatum(null));
-                        } catch (InterruptedException ie) {
-                            fail("Test was interrupted");
-                        }
-                    }
-                };
+      @Override
+      protected Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, Plus plus, UserInfo userInfo) {
+        final BlockingQueue<StreamsDatum> q = queue;
+        return new Runnable() {
+          @Override
+          public void run() {
+            try {
+              q.put(new StreamsDatum(null));
+            } catch (InterruptedException ie) {
+              fail("Test was interrupted");
             }
+          }
         };
+      }
+    };
 
-        try {
-            provider.prepare(null);
-            provider.startStream();
-            int datumCount = 0;
-            while(provider.isRunning()) {
-                datumCount += provider.readCurrent().size();
-            }
-            assertEquals(numUsers, datumCount);
-        } finally {
-            provider.cleanUp();
-        }
+    try {
+      provider.prepare(null);
+      provider.startStream();
+      int datumCount = 0;
+      while (provider.isRunning()) {
+        datumCount += provider.readCurrent().size();
+      }
+      assertEquals(numUsers, datumCount);
+    } finally {
+      provider.cleanUp();
     }
+  }
 
 
 }


[20/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterFollowingProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterFollowingProvider.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterFollowingProvider.java
index 66c1104..2527d29 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterFollowingProvider.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterFollowingProvider.java
@@ -18,28 +18,26 @@
 
 package org.apache.streams.twitter.provider;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfiguration;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.DatumStatusCounter;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsResultSet;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.twitter.TwitterFollowingConfiguration;
+import org.apache.streams.twitter.converter.TwitterDateTimeFormat;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Queues;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.Uninterruptibles;
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 import com.typesafe.config.ConfigParseOptions;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.DatumStatusCounter;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsResultSet;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.twitter.TwitterFollowingConfiguration;
-import org.apache.streams.twitter.TwitterStreamConfiguration;
-import org.apache.streams.twitter.converter.TwitterDateTimeFormat;
-import org.apache.streams.util.ComponentUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import twitter4j.Twitter;
@@ -51,162 +49,184 @@ import java.io.PrintStream;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Queue;
-import java.util.concurrent.ConcurrentLinkedQueue;
-import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 /**
- * Created by sblackmon on 11/25/14.
+ * Retrieve all follow adjacencies from a list of user ids or names.
  */
 public class TwitterFollowingProvider extends TwitterUserInformationProvider {
 
-    public static final String STREAMS_ID = "TwitterFollowingProvider";
-    private static final Logger LOGGER = LoggerFactory.getLogger(TwitterFollowingProvider.class);
-
-    protected final ReadWriteLock lock = new ReentrantReadWriteLock();
-
-    private TwitterFollowingConfiguration config;
-
-    List<ListenableFuture<Object>> futures = new ArrayList<>();
-
-    public static void main(String[] args) throws Exception {
+  public static final String STREAMS_ID = "TwitterFollowingProvider";
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterFollowingProvider.class);
+
+  protected final ReadWriteLock lock = new ReentrantReadWriteLock();
+
+  private TwitterFollowingConfiguration config;
+
+  List<ListenableFuture<Object>> futures = new ArrayList<>();
+
+  /**
+   * To use from command line:
+   *
+   * <p/>
+   * Supply (at least) the following required configuration in application.conf:
+   *
+   * <p/>
+   * twitter.oauth.consumerKey
+   * twitter.oauth.consumerSecret
+   * twitter.oauth.accessToken
+   * twitter.oauth.accessTokenSecret
+   * twitter.info
+   *
+   * <p/>
+   * Launch using:
+   *
+   * <p/>
+   * mvn exec:java -Dexec.mainClass=org.apache.streams.twitter.provider.TwitterFollowingProvider -Dexec.args="application.conf tweets.json"
+   *
+   * @param args args
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    TwitterFollowingConfiguration config = new ComponentConfigurator<>(TwitterFollowingConfiguration.class).detectConfiguration(typesafe, "twitter");
+    TwitterFollowingProvider provider = new TwitterFollowingProvider(config);
+
+    ObjectMapper mapper = new StreamsJacksonMapper(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
+
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
+      while (iterator.hasNext()) {
+        StreamsDatum datum = iterator.next();
+        String json;
+        try {
+          json = mapper.writeValueAsString(datum.getDocument());
+          outStream.println(json);
+        } catch (JsonProcessingException ex) {
+          System.err.println(ex.getMessage());
+        }
+      }
+    }
+    while ( provider.isRunning());
+    provider.cleanUp();
+    outStream.flush();
+  }
 
-        Preconditions.checkArgument(args.length >= 2);
+  public TwitterFollowingConfiguration getConfig() {
+    return config;
+  }
 
-        String configfile = args[0];
-        String outfile = args[1];
+  public static final int MAX_NUMBER_WAITING = 10000;
 
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config testResourceConfig = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+  public TwitterFollowingProvider() {
+    this.config = new ComponentConfigurator<>(TwitterFollowingConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig().getConfig("twitter"));
+  }
 
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+  public TwitterFollowingProvider(TwitterFollowingConfiguration config) {
+    super(config);
+    this.config = config;
+  }
 
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        TwitterFollowingConfiguration config = new ComponentConfigurator<>(TwitterFollowingConfiguration.class).detectConfiguration(typesafe, "twitter");
-        TwitterFollowingProvider provider = new TwitterFollowingProvider(config);
+  @Override
+  public void prepare(Object configurationObject) {
+    super.prepare(config);
+    Preconditions.checkNotNull(getConfig().getEndpoint());
+    Preconditions.checkArgument(getConfig().getEndpoint().equals("friends") || getConfig().getEndpoint().equals("followers"));
+    return;
+  }
 
-        ObjectMapper mapper = new StreamsJacksonMapper(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
+  @Override
+  public void startStream() {
 
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
-            while(iterator.hasNext()) {
-                StreamsDatum datum = iterator.next();
-                String json;
-                try {
-                    json = mapper.writeValueAsString(datum.getDocument());
-                    outStream.println(json);
-                } catch (JsonProcessingException e) {
-                    System.err.println(e.getMessage());
-                }
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
-    }
+    Preconditions.checkNotNull(executor);
 
-    public TwitterFollowingConfiguration getConfig()              { return config; }
+    Preconditions.checkArgument(idsBatches.hasNext() || screenNameBatches.hasNext());
 
-    public static final int MAX_NUMBER_WAITING = 10000;
+    LOGGER.info("startStream");
 
-    public TwitterFollowingProvider() {
-        this.config = new ComponentConfigurator<>(TwitterFollowingConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig().getConfig("twitter"));
-    }
+    running.set(true);
 
-    public TwitterFollowingProvider(TwitterFollowingConfiguration config) {
-        super(config);
-        this.config = config;
+    while (idsBatches.hasNext()) {
+      submitFollowingThreads(idsBatches.next());
     }
-
-    @Override
-    public void prepare(Object o) {
-        super.prepare(config);
-        Preconditions.checkNotNull(getConfig().getEndpoint());
-        Preconditions.checkArgument(getConfig().getEndpoint().equals("friends") || getConfig().getEndpoint().equals("followers"));
-        return;
+    while (screenNameBatches.hasNext()) {
+      submitFollowingThreads(screenNameBatches.next());
     }
 
-    @Override
-    public void startStream() {
+    executor.shutdown();
 
-        Preconditions.checkNotNull(executor);
-
-        Preconditions.checkArgument(idsBatches.hasNext() || screenNameBatches.hasNext());
-
-        LOGGER.info("startStream");
-
-        running.set(true);
-
-        while (idsBatches.hasNext()) {
-            submitFollowingThreads(idsBatches.next());
-        }
-        while (screenNameBatches.hasNext()) {
-            submitFollowingThreads(screenNameBatches.next());
-        }
+  }
 
-        executor.shutdown();
+  protected void submitFollowingThreads(Long[] ids) {
+    Twitter client = getTwitterClient();
 
+    for (int i = 0; i < ids.length; i++) {
+      TwitterFollowingProviderTask providerTask = new TwitterFollowingProviderTask(this, client, ids[i]);
+      ListenableFuture future = executor.submit(providerTask);
+      futures.add(future);
+      LOGGER.info("submitted {}", ids[i]);
     }
+  }
 
-    protected void submitFollowingThreads(Long[] ids) {
-        Twitter client = getTwitterClient();
+  protected void submitFollowingThreads(String[] screenNames) {
+    Twitter client = getTwitterClient();
 
-        for (int i = 0; i < ids.length; i++) {
-            TwitterFollowingProviderTask providerTask = new TwitterFollowingProviderTask(this, client, ids[i]);
-            ListenableFuture future = executor.submit(providerTask);
-            futures.add(future);
-            LOGGER.info("submitted {}", ids[i]);
-        }
+    for (int i = 0; i < screenNames.length; i++) {
+      TwitterFollowingProviderTask providerTask = new TwitterFollowingProviderTask(this, client, screenNames[i]);
+      ListenableFuture future = executor.submit(providerTask);
+      futures.add(future);
+      LOGGER.info("submitted {}", screenNames[i]);
     }
 
-    protected void submitFollowingThreads(String[] screenNames) {
-        Twitter client = getTwitterClient();
-
-        for (int i = 0; i < screenNames.length; i++) {
-            TwitterFollowingProviderTask providerTask = new TwitterFollowingProviderTask(this, client, screenNames[i]);
-            ListenableFuture future = executor.submit(providerTask);
-            futures.add(future);
-            LOGGER.info("submitted {}", screenNames[i]);
-        }
-
-    }
+  }
 
-    @Override
-    public StreamsResultSet readCurrent() {
+  @Override
+  public StreamsResultSet readCurrent() {
 
-        LOGGER.info("{}{} - readCurrent", idsBatches, screenNameBatches);
+    LOGGER.info("{}{} - readCurrent", idsBatches, screenNameBatches);
 
-        StreamsResultSet result;
+    StreamsResultSet result;
 
-        try {
-            lock.writeLock().lock();
-            result = new StreamsResultSet(providerQueue);
-            result.setCounter(new DatumStatusCounter());
-            providerQueue = constructQueue();
-            LOGGER.debug("{}{} - providing {} docs", idsBatches, screenNameBatches, result.size());
-        } finally {
-            lock.writeLock().unlock();
-        }
+    try {
+      lock.writeLock().lock();
+      result = new StreamsResultSet(providerQueue);
+      result.setCounter(new DatumStatusCounter());
+      providerQueue = constructQueue();
+      LOGGER.debug("{}{} - providing {} docs", idsBatches, screenNameBatches, result.size());
+    } finally {
+      lock.writeLock().unlock();
+    }
 
-        return result;
+    return result;
 
-    }
+  }
 
-    @Override
-    public boolean isRunning() {
-        if (providerQueue.isEmpty() && executor.isTerminated() && Futures.allAsList(futures).isDone()) {
-            LOGGER.info("Completed");
-            running.set(false);
-            LOGGER.info("Exiting");
-        }
-        return running.get();
+  @Override
+  public boolean isRunning() {
+    if (providerQueue.isEmpty() && executor.isTerminated() && Futures.allAsList(futures).isDone()) {
+      LOGGER.info("Completed");
+      running.set(false);
+      LOGGER.info("Exiting");
     }
+    return running.get();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterFollowingProviderTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterFollowingProviderTask.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterFollowingProviderTask.java
index f2346fb..ee800fa 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterFollowingProviderTask.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterFollowingProviderTask.java
@@ -18,13 +18,14 @@
 
 package org.apache.streams.twitter.provider;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.twitter.pojo.Follow;
 import org.apache.streams.twitter.pojo.User;
 import org.apache.streams.util.ComponentUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import twitter4j.PagableResponseList;
@@ -37,188 +38,208 @@ import twitter4j.TwitterObjectFactory;
  */
 public class TwitterFollowingProviderTask implements Runnable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(TwitterFollowingProviderTask.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterFollowingProviderTask.class);
+
+  private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+  protected TwitterFollowingProvider provider;
+  protected Twitter client;
+  protected Long id;
+  protected String screenName;
+
+  int count = 0;
+
+  /**
+   * TwitterFollowingProviderTask constructor.
+   * @param provider TwitterFollowingProvider
+   * @param twitter Twitter
+   * @param id numeric id
+   */
+  public TwitterFollowingProviderTask(TwitterFollowingProvider provider, Twitter twitter, Long id) {
+    this.provider = provider;
+    this.client = twitter;
+    this.id = id;
+  }
+
+  /**
+   * TwitterFollowingProviderTask constructor.
+   * @param provider TwitterFollowingProvider
+   * @param twitter Twitter
+   * @param screenName screenName
+   */
+  public TwitterFollowingProviderTask(TwitterFollowingProvider provider, Twitter twitter, String screenName) {
+    this.provider = provider;
+    this.client = twitter;
+    this.screenName = screenName;
+  }
+
+
+  @Override
+  public void run() {
+
+    Preconditions.checkArgument(id != null || screenName != null);
+
+    if ( id != null ) {
+      getFollowing(id);
+    } else if ( screenName != null) {
+      getFollowing(screenName);
+    }
 
-    private final static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+    LOGGER.info(id != null ? id.toString() : screenName + " Thread Finished");
 
-    protected TwitterFollowingProvider provider;
-    protected Twitter client;
-    protected Long id;
-    protected String screenName;
+  }
 
-    int count = 0;
+  protected void getFollowing(Long id) {
 
-    public TwitterFollowingProviderTask(TwitterFollowingProvider provider, Twitter twitter, Long id) {
-        this.provider = provider;
-        this.client = twitter;
-        this.id = id;
-    }
+    Preconditions.checkArgument(
+        provider.getConfig().getEndpoint().equals("friends")
+        || provider.getConfig().getEndpoint().equals("followers")
+    );
 
-    public TwitterFollowingProviderTask(TwitterFollowingProvider provider, Twitter twitter, String screenName) {
-        this.provider = provider;
-        this.client = twitter;
-        this.screenName = screenName;
+    if ( provider.getConfig().getIdsOnly() ) {
+      collectIds(id);
+    } else {
+      collectUsers(id);
     }
+  }
 
+  protected void getFollowing(String screenName) {
 
-    @Override
-    public void run() {
+    twitter4j.User user = null;
+    try {
+      user = client.users().showUser(screenName);
+    } catch (TwitterException ex) {
+      LOGGER.error("Failure looking up " + id);
+    }
+    Preconditions.checkNotNull(user);
+    getFollowing(user.getId());
+  }
 
-        Preconditions.checkArgument(id != null || screenName != null);
+  private void collectUsers(Long id) {
+    int keepTrying = 0;
 
-        if( id != null )
-            getFollowing(id);
-        else if( screenName != null)
-            getFollowing(screenName);
+    long curser = -1;
 
-        LOGGER.info(id != null ? id.toString() : screenName + " Thread Finished");
+    do {
+      try {
+        twitter4j.User user;
+        String userJson;
+        try {
+          user = client.users().showUser(id);
+          userJson = TwitterObjectFactory.getRawJSON(user);
+        } catch (TwitterException ex) {
+          LOGGER.error("Failure looking up " + id);
+          break;
+        }
 
-    }
+        PagableResponseList<twitter4j.User> list = null;
+        if ( provider.getConfig().getEndpoint().equals("followers") ) {
+          list = client.friendsFollowers().getFollowersList(id.longValue(), curser, provider.getConfig().getMaxItems().intValue());
+        } else if ( provider.getConfig().getEndpoint().equals("friends") ) {
+          list = client.friendsFollowers().getFriendsList(id.longValue(), curser, provider.getConfig().getMaxItems().intValue());
+        }
 
-    protected void getFollowing(Long id) {
+        Preconditions.checkNotNull(list);
+        Preconditions.checkArgument(list.size() > 0);
 
-        Preconditions.checkArgument(provider.getConfig().getEndpoint().equals("friends") || provider.getConfig().getEndpoint().equals("followers"));
+        for (twitter4j.User other : list) {
 
-        if( provider.getConfig().getIdsOnly() )
-            collectIds(id);
-        else
-            collectUsers(id);
-    }
+          String otherJson = TwitterObjectFactory.getRawJSON(other);
 
-    private void collectUsers(Long id) {
-        int keepTrying = 0;
-
-        long curser = -1;
-
-        do
-        {
-            try
-            {
-                twitter4j.User user;
-                String userJson;
-                try {
-                    user = client.users().showUser(id);
-                    userJson = TwitterObjectFactory.getRawJSON(user);
-                } catch (TwitterException e) {
-                    LOGGER.error("Failure looking up " + id);
-                    break;
-                }
-
-                PagableResponseList<twitter4j.User> list = null;
-                if( provider.getConfig().getEndpoint().equals("followers") )
-                    list = client.friendsFollowers().getFollowersList(id.longValue(), curser, provider.getConfig().getMaxItems().intValue());
-                else if( provider.getConfig().getEndpoint().equals("friends") )
-                    list = client.friendsFollowers().getFriendsList(id.longValue(), curser, provider.getConfig().getMaxItems().intValue());
-
-                Preconditions.checkNotNull(list);
-                Preconditions.checkArgument(list.size() > 0);
-
-                for (twitter4j.User other : list) {
-
-                    String otherJson = TwitterObjectFactory.getRawJSON(other);
-
-                    try {
-                        Follow follow = null;
-                        if( provider.getConfig().getEndpoint().equals("followers") ) {
-                            follow = new Follow()
-                                    .withFollowee(mapper.readValue(userJson, User.class))
-                                    .withFollower(mapper.readValue(otherJson, User.class));
-                        } else if( provider.getConfig().getEndpoint().equals("friends") ) {
-                            follow = new Follow()
-                                    .withFollowee(mapper.readValue(otherJson, User.class))
-                                    .withFollower(mapper.readValue(userJson, User.class));
-                        }
-
-                        Preconditions.checkNotNull(follow);
-
-                        if( count < provider.getConfig().getMaxItems()) {
-                            ComponentUtils.offerUntilSuccess(new StreamsDatum(follow), provider.providerQueue);
-                            count++;
-                        }
-
-                    } catch (Exception e) {
-                        LOGGER.warn("Exception: {}", e);
-                    }
-                }
-                if( !list.hasNext() ) break;
-                if( list.getNextCursor() == 0 ) break;
-                curser = list.getNextCursor();
-            }
-            catch(TwitterException twitterException) {
-                keepTrying += TwitterErrorHandler.handleTwitterError(client, twitterException);
+          try {
+            Follow follow = null;
+            if ( provider.getConfig().getEndpoint().equals("followers") ) {
+              follow = new Follow()
+                  .withFollowee(mapper.readValue(userJson, User.class))
+                  .withFollower(mapper.readValue(otherJson, User.class));
+            } else if ( provider.getConfig().getEndpoint().equals("friends") ) {
+              follow = new Follow()
+                  .withFollowee(mapper.readValue(otherJson, User.class))
+                  .withFollower(mapper.readValue(userJson, User.class));
             }
-            catch(Exception e) {
-                keepTrying += TwitterErrorHandler.handleTwitterError(client, e);
+
+            Preconditions.checkNotNull(follow);
+
+            if ( count < provider.getConfig().getMaxItems()) {
+              ComponentUtils.offerUntilSuccess(new StreamsDatum(follow), provider.providerQueue);
+              count++;
             }
-        } while (curser != 0 && keepTrying < provider.getConfig().getRetryMax() && count < provider.getConfig().getMaxItems());
+
+          } catch (Exception ex) {
+            LOGGER.warn("Exception: {}", ex);
+          }
+        }
+        if ( !list.hasNext() ) {
+          break;
+        }
+        if ( list.getNextCursor() == 0 ) {
+          break;
+        }
+        curser = list.getNextCursor();
+      } catch (TwitterException twitterException) {
+        keepTrying += TwitterErrorHandler.handleTwitterError(client, twitterException);
+      } catch (Exception ex) {
+        keepTrying += TwitterErrorHandler.handleTwitterError(client, ex);
+      }
     }
+    while (curser != 0 && keepTrying < provider.getConfig().getRetryMax() && count < provider.getConfig().getMaxItems());
+  }
 
-    private void collectIds(Long id) {
-        int keepTrying = 0;
-
-        long curser = -1;
-
-        do
-        {
-            try
-            {
-                twitter4j.IDs ids = null;
-                if( provider.getConfig().getEndpoint().equals("followers") )
-                    ids = client.friendsFollowers().getFollowersIDs(id.longValue(), curser, provider.getConfig().getMaxItems().intValue());
-                else if( provider.getConfig().getEndpoint().equals("friends") )
-                    ids = client.friendsFollowers().getFriendsIDs(id.longValue(), curser, provider.getConfig().getMaxItems().intValue());
-
-                Preconditions.checkNotNull(ids);
-                Preconditions.checkArgument(ids.getIDs().length > 0);
-
-                for (long otherId : ids.getIDs()) {
-
-                    try {
-                        Follow follow = null;
-                        if( provider.getConfig().getEndpoint().equals("followers") ) {
-                            follow = new Follow()
-                                    .withFollowee(new User().withId(id))
-                                    .withFollower(new User().withId(otherId));
-                        } else if( provider.getConfig().getEndpoint().equals("friends") ) {
-                            follow = new Follow()
-                                    .withFollowee(new User().withId(otherId))
-                                    .withFollower(new User().withId(id));
-                        }
-
-                        Preconditions.checkNotNull(follow);
-
-                        if( count < provider.getConfig().getMaxItems()) {
-                            ComponentUtils.offerUntilSuccess(new StreamsDatum(follow), provider.providerQueue);
-                            count++;
-                        }
-                    } catch (Exception e) {
-                        LOGGER.warn("Exception: {}", e);
-                    }
-                }
-                if( !ids.hasNext() ) break;
-                if( ids.getNextCursor() == 0 ) break;
-                curser = ids.getNextCursor();
-            }
-            catch(TwitterException twitterException) {
-                keepTrying += TwitterErrorHandler.handleTwitterError(client, id, twitterException);
-            }
-            catch(Exception e) {
-                keepTrying += TwitterErrorHandler.handleTwitterError(client, e);
+  private void collectIds(Long id) {
+    int keepTrying = 0;
+
+    long curser = -1;
+
+    do {
+      try {
+        twitter4j.IDs ids = null;
+        if ( provider.getConfig().getEndpoint().equals("followers") ) {
+          ids = client.friendsFollowers().getFollowersIDs(id.longValue(), curser, provider.getConfig().getMaxItems().intValue());
+        } else if ( provider.getConfig().getEndpoint().equals("friends") ) {
+          ids = client.friendsFollowers().getFriendsIDs(id.longValue(), curser, provider.getConfig().getMaxItems().intValue());
+        }
+
+        Preconditions.checkNotNull(ids);
+        Preconditions.checkArgument(ids.getIDs().length > 0);
+
+        for (long otherId : ids.getIDs()) {
+
+          try {
+            Follow follow = null;
+            if ( provider.getConfig().getEndpoint().equals("followers") ) {
+              follow = new Follow()
+                  .withFollowee(new User().withId(id))
+                  .withFollower(new User().withId(otherId));
+            } else if ( provider.getConfig().getEndpoint().equals("friends") ) {
+              follow = new Follow()
+                  .withFollowee(new User().withId(otherId))
+                  .withFollower(new User().withId(id));
             }
-        } while (curser != 0 && keepTrying < provider.getConfig().getRetryMax() && count < provider.getConfig().getMaxItems());
-    }
 
-    protected void getFollowing(String screenName) {
+            Preconditions.checkNotNull(follow);
 
-        twitter4j.User user = null;
-        try {
-            user = client.users().showUser(screenName);
-        } catch (TwitterException e) {
-            LOGGER.error("Failure looking up " + id);
+            if ( count < provider.getConfig().getMaxItems()) {
+              ComponentUtils.offerUntilSuccess(new StreamsDatum(follow), provider.providerQueue);
+              count++;
+            }
+          } catch (Exception ex) {
+            LOGGER.warn("Exception: {}", ex);
+          }
+        }
+        if ( !ids.hasNext() ) {
+          break;
         }
-        Preconditions.checkNotNull(user);
-        getFollowing(user.getId());
+        if ( ids.getNextCursor() == 0 ) {
+          break;
+        }
+        curser = ids.getNextCursor();
+      } catch (TwitterException twitterException) {
+        keepTrying += TwitterErrorHandler.handleTwitterError(client, id, twitterException);
+      } catch (Exception ex) {
+        keepTrying += TwitterErrorHandler.handleTwitterError(client, ex);
+      }
     }
-
+    while (curser != 0 && keepTrying < provider.getConfig().getRetryMax() && count < provider.getConfig().getMaxItems());
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterProviderUtil.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterProviderUtil.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterProviderUtil.java
index d9f4ec2..48666cb 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterProviderUtil.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterProviderUtil.java
@@ -16,28 +16,34 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.twitter.provider;
 
 import org.apache.streams.twitter.TwitterConfiguration;
 
 /**
- * Created by sblackmon on 7/26/15.
+ * TwitterProviderUtil contains utilities for Twitter Providers.
  */
 public class TwitterProviderUtil {
 
-    public static String baseUrl(TwitterConfiguration config) {
+  /**
+   * baseUrl from TwitterConfiguration.
+   * @param config TwitterConfiguration
+   * @return baseUrl
+   */
+  public static String baseUrl(TwitterConfiguration config) {
 
-        String baseUrl = new StringBuilder()
-                .append(config.getProtocol())
-                .append("://")
-                .append(config.getHost())
-                .append(":")
-                .append(config.getPort())
-                .append("/")
-                .append(config.getVersion())
-                .append("/")
-                .toString();
+    String baseUrl = new StringBuilder()
+        .append(config.getProtocol())
+        .append("://")
+        .append(config.getHost())
+        .append(":")
+        .append(config.getPort())
+        .append("/")
+        .append(config.getVersion())
+        .append("/")
+        .toString();
 
-        return baseUrl;
-    }
+    return baseUrl;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamHelper.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamHelper.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamHelper.java
new file mode 100644
index 0000000..a4562ef
--- /dev/null
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamHelper.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.twitter.provider;
+
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.twitter.converter.TwitterDocumentClassifier;
+import org.apache.streams.util.ComponentUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Lists;
+import com.twitter.hbc.core.processor.StringDelimitedProcessor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+/**
+ * TwitterStreamHelper helps with hosebird twitter stream.
+ */
+public class TwitterStreamHelper extends StringDelimitedProcessor {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterStreamHelper.class);
+  private static final int DEFAULT_POOL_SIZE = 5;
+
+  private static final TwitterDocumentClassifier TWITTER_DOCUMENT_CLASSIFIER = new TwitterDocumentClassifier();
+
+  private final TwitterStreamProvider provider;
+  private final ExecutorService service;
+
+  public TwitterStreamHelper(TwitterStreamProvider provider) {
+    this(provider, DEFAULT_POOL_SIZE);
+  }
+
+  /**
+   * TwitterStreamHelper constructor.
+   * @param provider TwitterStreamProvider
+   * @param poolSize poolSize
+   */
+  public TwitterStreamHelper(TwitterStreamProvider provider, int poolSize) {
+    //We are only going to use the Hosebird processor to manage the extraction of the tweets from the Stream
+    super(null);
+    service = Executors.newFixedThreadPool(poolSize);
+    this.provider = provider;
+  }
+
+  @Override
+  public boolean process() throws IOException, InterruptedException {
+    String msg;
+    do {
+      msg = this.processNextMessage();
+      if (msg == null) {
+        Thread.sleep(10);
+      }
+    }
+    while (msg == null);
+
+    //Deserializing to an ObjectNode can take time.  Parallelize the task to improve throughput
+    return provider.addDatum(service.submit(new StreamDeserializer(msg)));
+  }
+
+  public void cleanUp() {
+    ComponentUtils.shutdownExecutor(service, 1, 30);
+  }
+
+  protected static class StreamDeserializer implements Callable<List<StreamsDatum>> {
+
+    protected static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+    protected String item;
+
+    public StreamDeserializer(String item) {
+      this.item = item;
+    }
+
+    @Override
+    public List<StreamsDatum> call() throws Exception {
+      if (item != null) {
+        Class itemClass = TWITTER_DOCUMENT_CLASSIFIER.detectClasses(item).get(0);
+        Object document = mapper.readValue(item, itemClass);
+        StreamsDatum rawDatum = new StreamsDatum(document);
+        return Lists.newArrayList(rawDatum);
+      }
+      return new ArrayList<>();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamProcessor.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamProcessor.java
deleted file mode 100644
index 96df67b..0000000
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamProcessor.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.twitter.provider;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
-import com.twitter.hbc.core.processor.StringDelimitedProcessor;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.util.ComponentUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-/**
- *
- */
-public class TwitterStreamProcessor extends StringDelimitedProcessor {
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(TwitterStreamProcessor.class);
-    private static final int DEFAULT_POOL_SIZE = 5;
-
-    private final TwitterStreamProvider provider;
-    private final ExecutorService service;
-
-    public TwitterStreamProcessor(TwitterStreamProvider provider) {
-        this(provider, DEFAULT_POOL_SIZE);
-    }
-
-    public TwitterStreamProcessor(TwitterStreamProvider provider, int poolSize) {
-        //We are only going to use the Hosebird processor to manage the extraction of the tweets from the Stream
-        super(null);
-        service = Executors.newFixedThreadPool(poolSize);
-        this.provider = provider;
-    }
-
-
-    @Override
-    public boolean process() throws IOException, InterruptedException {
-        String msg;
-        do {
-            msg = this.processNextMessage();
-            if(msg == null) {
-                Thread.sleep(10);
-            }
-        } while(msg == null);
-
-        //Deserializing to an ObjectNode can take time.  Parallelize the task to improve throughput
-        return provider.addDatum(service.submit(new StreamDeserializer(msg)));
-    }
-
-    public void cleanUp() {
-        ComponentUtils.shutdownExecutor(service, 1, 30);
-    }
-
-    protected static class StreamDeserializer implements Callable<List<StreamsDatum>> {
-
-        protected static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-        protected String item;
-
-        public StreamDeserializer(String item) {
-            this.item = item;
-        }
-
-        @Override
-        public List<StreamsDatum> call() throws Exception {
-            if(item != null) {
-                Class itemClass = TwitterEventClassifier.detectClass(item);
-                Object document = mapper.readValue(item, itemClass);
-                StreamsDatum rawDatum = new StreamsDatum(document);
-                return Lists.newArrayList(rawDatum);
-            }
-            return new ArrayList<>();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamProvider.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamProvider.java
index 3856935..1895ee2 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamProvider.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterStreamProvider.java
@@ -18,6 +18,20 @@
 
 package org.apache.streams.twitter.provider;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfiguration;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.DatumStatus;
+import org.apache.streams.core.DatumStatusCountable;
+import org.apache.streams.core.DatumStatusCounter;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProvider;
+import org.apache.streams.core.StreamsResultSet;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.twitter.TwitterStreamConfiguration;
+import org.apache.streams.twitter.converter.TwitterDateTimeFormat;
+import org.apache.streams.util.ComponentUtils;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Preconditions;
@@ -41,19 +55,6 @@ import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 import com.typesafe.config.ConfigParseOptions;
 import org.apache.commons.lang.NotImplementedException;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.DatumStatus;
-import org.apache.streams.core.DatumStatusCountable;
-import org.apache.streams.core.DatumStatusCounter;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProvider;
-import org.apache.streams.core.StreamsResultSet;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.twitter.TwitterStreamConfiguration;
-import org.apache.streams.twitter.converter.TwitterDateTimeFormat;
-import org.apache.streams.util.ComponentUtils;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -84,269 +85,282 @@ import java.util.concurrent.atomic.AtomicBoolean;
  */
 public class TwitterStreamProvider implements StreamsProvider, Serializable, DatumStatusCountable {
 
-    public final static String STREAMS_ID = "TwitterStreamProvider";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(TwitterStreamProvider.class);
-
-    public static void main(String[] args) {
-
-        Preconditions.checkArgument(args.length >= 2);
-
-        String configfile = args[0];
-        String outfile = args[1];
-
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config testResourceConfig = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
-
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        TwitterStreamConfiguration config = new ComponentConfigurator<>(TwitterStreamConfiguration.class).detectConfiguration(typesafe, "twitter");
-        TwitterStreamProvider provider = new TwitterStreamProvider(config);
-
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
-
-        PrintStream outStream = null;
+  public static final String STREAMS_ID = "TwitterStreamProvider";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterStreamProvider.class);
+
+  /**
+   * To use from command line:
+   *
+   * <p/>
+   * Supply (at least) the following required configuration in application.conf:
+   *
+   * <p/>
+   * twitter.oauth.consumerKey
+   * twitter.oauth.consumerSecret
+   * twitter.oauth.accessToken
+   * twitter.oauth.accessTokenSecret
+   *
+   * <p/>
+   * Launch using:
+   *
+   * <p/>
+   * mvn exec:java -Dexec.mainClass=org.apache.streams.twitter.provider.TwitterStreamProvider -Dexec.args="application.conf tweets.json"
+   *
+   * @param args
+   */
+  public static void main(String[] args) {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    TwitterStreamConfiguration config = new ComponentConfigurator<>(TwitterStreamConfiguration.class).detectConfiguration(typesafe, "twitter");
+    TwitterStreamProvider provider = new TwitterStreamProvider(config);
+
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
+
+    PrintStream outStream = null;
+    try {
+      outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    } catch (FileNotFoundException ex) {
+      LOGGER.error("FileNotFoundException", ex);
+      return;
+    }
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
+      while (iterator.hasNext()) {
+        StreamsDatum datum = iterator.next();
+        String json;
         try {
-            outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        } catch (FileNotFoundException e) {
-            LOGGER.error("FileNotFoundException", e);
-            return;
+          json = mapper.writeValueAsString(datum.getDocument());
+          outStream.println(json);
+        } catch (JsonProcessingException ex) {
+          System.err.println(ex.getMessage());
         }
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
-            while(iterator.hasNext()) {
-                StreamsDatum datum = iterator.next();
-                String json;
-                try {
-                    json = mapper.writeValueAsString(datum.getDocument());
-                    outStream.println(json);
-                } catch (JsonProcessingException e) {
-                    System.err.println(e.getMessage());
-                }
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
+      }
     }
-
-    public static final int MAX_BATCH = 1000;
-
-    private TwitterStreamConfiguration config;
-
-    public TwitterStreamConfiguration getConfig() {
-        return config;
+    while ( provider.isRunning());
+    provider.cleanUp();
+    outStream.flush();
+  }
+
+  public static final int MAX_BATCH = 1000;
+
+  private TwitterStreamConfiguration config;
+
+  public TwitterStreamConfiguration getConfig() {
+    return config;
+  }
+
+  public void setConfig(TwitterStreamConfiguration config) {
+    this.config = config;
+  }
+
+  protected volatile Queue<Future<List<StreamsDatum>>> providerQueue;
+
+  protected Hosts hosebirdHosts;
+  protected Authentication auth;
+  protected StreamingEndpoint endpoint;
+  protected BasicClient client;
+  protected AtomicBoolean running = new AtomicBoolean(false);
+  protected TwitterStreamHelper processor = new TwitterStreamHelper(this);
+  private DatumStatusCounter countersCurrent = new DatumStatusCounter();
+  private DatumStatusCounter countersTotal = new DatumStatusCounter();
+
+  public TwitterStreamProvider() {
+    this.config = new ComponentConfigurator<>(TwitterStreamConfiguration.class).detectConfiguration(StreamsConfigurator.config, "twitter");
+  }
+
+  public TwitterStreamProvider(TwitterStreamConfiguration config) {
+    this.config = config;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void startStream() {
+    client.connect();
+    running.set(true);
+  }
+
+  @Override
+  public synchronized StreamsResultSet readCurrent() {
+
+    StreamsResultSet current;
+    synchronized (this) {
+      Queue<StreamsDatum> drain = Queues.newLinkedBlockingDeque();
+      drainTo(drain);
+      current = new StreamsResultSet(drain);
+      current.setCounter(new DatumStatusCounter());
+      current.getCounter().add(countersCurrent);
+      countersTotal.add(countersCurrent);
+      countersCurrent = new DatumStatusCounter();
     }
 
-    public void setConfig(TwitterStreamConfiguration config) {
-        this.config = config;
-    }
+    return current;
+  }
 
-    protected volatile Queue<Future<List<StreamsDatum>>> providerQueue;
-
-    protected Hosts hosebirdHosts;
-    protected Authentication auth;
-    protected StreamingEndpoint endpoint;
-    protected BasicClient client;
-    protected AtomicBoolean running = new AtomicBoolean(false);
-    protected TwitterStreamProcessor processor = new TwitterStreamProcessor(this);
-    private DatumStatusCounter countersCurrent = new DatumStatusCounter();
-    private DatumStatusCounter countersTotal = new DatumStatusCounter();
-
-    private static ExecutorService newFixedThreadPoolWithQueueSize(int nThreads, int queueSize) {
-        return new ThreadPoolExecutor(nThreads, nThreads,
-                5000L, TimeUnit.MILLISECONDS,
-                new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
-    }
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    throw new NotImplementedException();
+  }
 
-    public TwitterStreamProvider() {
-        this.config = new ComponentConfigurator<>(TwitterStreamConfiguration.class).detectConfiguration(StreamsConfigurator.config, "twitter");
-    }
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end)  {
+    throw new NotImplementedException();
+  }
 
-    public TwitterStreamProvider(TwitterStreamConfiguration config) {
-        this.config = config;
-    }
+  @Override
+  public boolean isRunning() {
+    return this.running.get() && !client.isDone();
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public void prepare(Object configurationObject) {
 
-    @Override
-    public void startStream() {
-        client.connect();
-        running.set(true);
-    }
+    Preconditions.checkNotNull(config.getEndpoint());
 
-    @Override
-    public synchronized StreamsResultSet readCurrent() {
-
-        StreamsResultSet current;
-        synchronized(this) {
-            Queue<StreamsDatum> drain = Queues.newLinkedBlockingDeque();
-            drainTo(drain);
-            current = new StreamsResultSet(drain);
-            current.setCounter(new DatumStatusCounter());
-            current.getCounter().add(countersCurrent);
-            countersTotal.add(countersCurrent);
-            countersCurrent = new DatumStatusCounter();
-        }
+    if (config.getEndpoint().equals("userstream") ) {
 
-        return current;
-    }
+      hosebirdHosts = new HttpHosts(Constants.USERSTREAM_HOST);
 
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        throw new NotImplementedException();
-    }
+      UserstreamEndpoint userstreamEndpoint = new UserstreamEndpoint();
+      userstreamEndpoint.withFollowings(true);
+      userstreamEndpoint.withUser(false);
+      userstreamEndpoint.allReplies(false);
+      endpoint = userstreamEndpoint;
+    } else if (config.getEndpoint().equals("sample") ) {
 
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end)  {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public boolean isRunning() {
-        return this.running.get() && !client.isDone();
-    }
+      hosebirdHosts = new HttpHosts(Constants.STREAM_HOST);
 
-    @Override
-    public void prepare(Object o) {
+      boolean track = config.getTrack() != null && !config.getTrack().isEmpty();
+      boolean follow = config.getFollow() != null && !config.getFollow().isEmpty();
 
-        Preconditions.checkNotNull(config.getEndpoint());
-
-        if(config.getEndpoint().equals("userstream") ) {
-
-            hosebirdHosts = new HttpHosts(Constants.USERSTREAM_HOST);
-
-            UserstreamEndpoint userstreamEndpoint = new UserstreamEndpoint();
-            userstreamEndpoint.withFollowings(true);
-            userstreamEndpoint.withUser(false);
-            userstreamEndpoint.allReplies(false);
-            endpoint = userstreamEndpoint;
+      if ( track || follow ) {
+        LOGGER.debug("***\tPRESENT\t***");
+        StatusesFilterEndpoint statusesFilterEndpoint = new StatusesFilterEndpoint();
+        if ( track ) {
+          statusesFilterEndpoint.trackTerms(config.getTrack());
         }
-        else if(config.getEndpoint().equals("sample") ) {
-
-            hosebirdHosts = new HttpHosts(Constants.STREAM_HOST);
-
-            boolean track = config.getTrack() != null && !config.getTrack().isEmpty();
-            boolean follow = config.getFollow() != null && !config.getFollow().isEmpty();
-
-            if( track || follow ) {
-                LOGGER.debug("***\tPRESENT\t***");
-                StatusesFilterEndpoint statusesFilterEndpoint = new StatusesFilterEndpoint();
-                if( track ) {
-                    statusesFilterEndpoint.trackTerms(config.getTrack());
-                }
-                if( follow ) {
-                    statusesFilterEndpoint.followings(config.getFollow());
-                }
-                this.endpoint = statusesFilterEndpoint;
-            } else {
-                endpoint = new StatusesSampleEndpoint();
-            }
-
+        if ( follow ) {
+          statusesFilterEndpoint.followings(config.getFollow());
         }
-        else if( config.getEndpoint().endsWith("firehose")) {
-            hosebirdHosts = new HttpHosts(Constants.STREAM_HOST);
-            endpoint = new StatusesFirehoseEndpoint();
-        } else {
-            LOGGER.error("NO ENDPOINT RESOLVED");
-            return;
-        }
-
-        if( config.getBasicauth() != null ) {
-
-            Preconditions.checkNotNull(config.getBasicauth().getUsername());
-            Preconditions.checkNotNull(config.getBasicauth().getPassword());
-
-            auth = new BasicAuth(
-                    config.getBasicauth().getUsername(),
-                    config.getBasicauth().getPassword()
-            );
-
-        } else if( config.getOauth() != null ) {
-
-            Preconditions.checkNotNull(config.getOauth().getConsumerKey());
-            Preconditions.checkNotNull(config.getOauth().getConsumerSecret());
-            Preconditions.checkNotNull(config.getOauth().getAccessToken());
-            Preconditions.checkNotNull(config.getOauth().getAccessTokenSecret());
-
-            auth = new OAuth1(config.getOauth().getConsumerKey(),
-                    config.getOauth().getConsumerSecret(),
-                    config.getOauth().getAccessToken(),
-                    config.getOauth().getAccessTokenSecret());
+        this.endpoint = statusesFilterEndpoint;
+      } else {
+        endpoint = new StatusesSampleEndpoint();
+      }
+
+    } else if ( config.getEndpoint().endsWith("firehose")) {
+      hosebirdHosts = new HttpHosts(Constants.STREAM_HOST);
+      endpoint = new StatusesFirehoseEndpoint();
+    } else {
+      LOGGER.error("NO ENDPOINT RESOLVED");
+      return;
+    }
 
-        } else {
-            LOGGER.error("NO AUTH RESOLVED");
-            return;
-        }
+    if ( config.getBasicauth() != null ) {
 
-        LOGGER.debug("host={}\tendpoint={}\taut={}", hosebirdHosts, endpoint, auth);
+      Preconditions.checkNotNull(config.getBasicauth().getUsername());
+      Preconditions.checkNotNull(config.getBasicauth().getPassword());
 
-        providerQueue = new LinkedBlockingQueue<>(MAX_BATCH);
+      auth = new BasicAuth(
+          config.getBasicauth().getUsername(),
+          config.getBasicauth().getPassword()
+      );
 
-        client = new ClientBuilder()
-            .name("apache/streams/streams-contrib/streams-provider-twitter")
-            .hosts(hosebirdHosts)
-            .endpoint(endpoint)
-            .authentication(auth)
-            .connectionTimeout(1200000)
-            .processor(processor)
-            .build();
+    } else if ( config.getOauth() != null ) {
 
-    }
+      Preconditions.checkNotNull(config.getOauth().getConsumerKey());
+      Preconditions.checkNotNull(config.getOauth().getConsumerSecret());
+      Preconditions.checkNotNull(config.getOauth().getAccessToken());
+      Preconditions.checkNotNull(config.getOauth().getAccessTokenSecret());
 
-    @Override
-    public void cleanUp() {
-        this.client.stop();
-        this.processor.cleanUp();
-        this.running.set(false);
-    }
+      auth = new OAuth1(config.getOauth().getConsumerKey(),
+          config.getOauth().getConsumerSecret(),
+          config.getOauth().getAccessToken(),
+          config.getOauth().getAccessTokenSecret());
 
-    @Override
-    public DatumStatusCounter getDatumStatusCounter() {
-        return countersTotal;
+    } else {
+      LOGGER.error("NO AUTH RESOLVED");
+      return;
     }
 
-    protected boolean addDatum(Future<List<StreamsDatum>> future) {
-        try {
-            ComponentUtils.offerUntilSuccess(future, providerQueue);
-            countersCurrent.incrementStatus(DatumStatus.SUCCESS);
-            return true;
-        } catch (Exception e) {
-            countersCurrent.incrementStatus(DatumStatus.FAIL);
-            LOGGER.warn("Unable to enqueue item from Twitter stream");
-            return false;
-        }
+    LOGGER.debug("host={}\tendpoint={}\taut={}", hosebirdHosts, endpoint, auth);
+
+    providerQueue = new LinkedBlockingQueue<>(MAX_BATCH);
+
+    client = new ClientBuilder()
+        .name("apache/streams/streams-contrib/streams-provider-twitter")
+        .hosts(hosebirdHosts)
+        .endpoint(endpoint)
+        .authentication(auth)
+        .connectionTimeout(1200000)
+        .processor(processor)
+        .build();
+
+  }
+
+  @Override
+  public void cleanUp() {
+    this.client.stop();
+    this.processor.cleanUp();
+    this.running.set(false);
+  }
+
+  @Override
+  public DatumStatusCounter getDatumStatusCounter() {
+    return countersTotal;
+  }
+
+  protected boolean addDatum(Future<List<StreamsDatum>> future) {
+    try {
+      ComponentUtils.offerUntilSuccess(future, providerQueue);
+      countersCurrent.incrementStatus(DatumStatus.SUCCESS);
+      return true;
+    } catch (Exception ex) {
+      countersCurrent.incrementStatus(DatumStatus.FAIL);
+      LOGGER.warn("Unable to enqueue item from Twitter stream");
+      return false;
     }
-
-    protected void drainTo(Queue<StreamsDatum> drain) {
-        int count = 0;
-        while(!providerQueue.isEmpty() && count <= MAX_BATCH) {
-            for(StreamsDatum datum : pollForDatum()) {
-                ComponentUtils.offerUntilSuccess(datum, drain);
-                count++;
-            }
-        }
+  }
+
+  protected void drainTo(Queue<StreamsDatum> drain) {
+    int count = 0;
+    while (!providerQueue.isEmpty() && count <= MAX_BATCH) {
+      for (StreamsDatum datum : pollForDatum()) {
+        ComponentUtils.offerUntilSuccess(datum, drain);
+        count++;
+      }
     }
-
-    protected List<StreamsDatum> pollForDatum()  {
-        try {
-            return providerQueue.poll().get();
-        } catch (InterruptedException e) {
-            LOGGER.warn("Interrupted while waiting for future.  Initiate shutdown.");
-            this.cleanUp();
-            Thread.currentThread().interrupt();
-            return new ArrayList<>();
-        } catch (ExecutionException e) {
-            LOGGER.warn("Error getting tweet from future");
-            return new ArrayList<>();
-        }
+  }
+
+  protected List<StreamsDatum> pollForDatum()  {
+    try {
+      return providerQueue.poll().get();
+    } catch (InterruptedException ex) {
+      LOGGER.warn("Interrupted while waiting for future.  Initiate shutdown.");
+      this.cleanUp();
+      Thread.currentThread().interrupt();
+      return new ArrayList<>();
+    } catch (ExecutionException ex) {
+      LOGGER.warn("Error getting tweet from future");
+      return new ArrayList<>();
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterTimelineProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterTimelineProvider.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterTimelineProvider.java
index cea9829..7461356 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterTimelineProvider.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/provider/TwitterTimelineProvider.java
@@ -18,6 +18,17 @@
 
 package org.apache.streams.twitter.provider;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfiguration;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.DatumStatusCounter;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProvider;
+import org.apache.streams.core.StreamsResultSet;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.twitter.TwitterUserInformationConfiguration;
+import org.apache.streams.twitter.converter.TwitterDateTimeFormat;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Preconditions;
@@ -31,17 +42,6 @@ import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 import com.typesafe.config.ConfigParseOptions;
 import org.apache.commons.lang.NotImplementedException;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.DatumStatusCounter;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProvider;
-import org.apache.streams.core.StreamsResultSet;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.twitter.TwitterUserInformationConfiguration;
-import org.apache.streams.twitter.converter.TwitterDateTimeFormat;
-import org.apache.streams.util.ComponentUtils;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -65,7 +65,6 @@ import java.util.List;
 import java.util.Objects;
 import java.util.Queue;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -75,320 +74,335 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 import static java.util.concurrent.Executors.newSingleThreadExecutor;
 
 /**
- *  Retrieve recent posts from a list of user ids or names.
- *
- *  To use from command line:
- *
- *  Supply (at least) the following required configuration in application.conf:
- *
- *  twitter.oauth.consumerKey
- *  twitter.oauth.consumerSecret
- *  twitter.oauth.accessToken
- *  twitter.oauth.accessTokenSecret
- *  twitter.info
- *
- *  Launch using:
- *
- *  mvn exec:java -Dexec.mainClass=org.apache.streams.twitter.provider.TwitterTimelineProvider -Dexec.args="application.conf tweets.json"
+ * Retrieve recent posts from a list of user ids or names.
  */
 public class TwitterTimelineProvider implements StreamsProvider, Serializable {
 
-    public final static String STREAMS_ID = "TwitterTimelineProvider";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(TwitterTimelineProvider.class);
-
-    public static final int MAX_NUMBER_WAITING = 10000;
-
-    private TwitterUserInformationConfiguration config;
-
-    protected final ReadWriteLock lock = new ReentrantReadWriteLock();
-
-    public TwitterUserInformationConfiguration getConfig() {
-        return config;
-    }
-
-    public void setConfig(TwitterUserInformationConfiguration config) {
-        this.config = config;
-    }
-
-    protected Collection<String[]> screenNameBatches;
-    protected Collection<Long> ids;
+  public static final String STREAMS_ID = "TwitterTimelineProvider";
 
-    protected volatile Queue<StreamsDatum> providerQueue;
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterTimelineProvider.class);
 
-    protected int idsCount;
-    protected Twitter client;
+  public static final int MAX_NUMBER_WAITING = 10000;
 
-    protected ListeningExecutorService executor;
+  private TwitterUserInformationConfiguration config;
 
-    protected DateTime start;
-    protected DateTime end;
+  protected final ReadWriteLock lock = new ReentrantReadWriteLock();
 
-    protected final AtomicBoolean running = new AtomicBoolean();
+  public TwitterUserInformationConfiguration getConfig() {
+    return config;
+  }
 
-    List<ListenableFuture<Object>> futures = new ArrayList<>();
+  public void setConfig(TwitterUserInformationConfiguration config) {
+    this.config = config;
+  }
 
-    Boolean jsonStoreEnabled;
-    Boolean includeEntitiesEnabled;
+  protected Collection<String[]> screenNameBatches;
+  protected Collection<Long> ids;
 
-    public static void main(String[] args) throws Exception {
+  protected volatile Queue<StreamsDatum> providerQueue;
 
-        Preconditions.checkArgument(args.length >= 2);
+  protected int idsCount;
+  protected Twitter client;
 
-        String configfile = args[0];
-        String outfile = args[1];
+  protected ListeningExecutorService executor;
 
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config testResourceConfig = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+  protected DateTime start;
+  protected DateTime end;
 
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+  protected final AtomicBoolean running = new AtomicBoolean();
 
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        TwitterUserInformationConfiguration config = new ComponentConfigurator<>(TwitterUserInformationConfiguration.class).detectConfiguration(typesafe, "twitter");
-        TwitterTimelineProvider provider = new TwitterTimelineProvider(config);
+  List<ListenableFuture<Object>> futures = new ArrayList<>();
 
-        ObjectMapper mapper = new StreamsJacksonMapper(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
+  Boolean jsonStoreEnabled;
+  Boolean includeEntitiesEnabled;
 
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
-            while(iterator.hasNext()) {
-                StreamsDatum datum = iterator.next();
-                String json;
-                try {
-                    json = mapper.writeValueAsString(datum.getDocument());
-                    outStream.println(json);
-                } catch (JsonProcessingException e) {
-                    System.err.println(e.getMessage());
-                }
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
-    }
+  /**
+   * To use from command line:
+   *
+   * <p/>
+   * Supply (at least) the following required configuration in application.conf:
+   *
+   * <p/>
+   * twitter.oauth.consumerKey
+   * twitter.oauth.consumerSecret
+   * twitter.oauth.accessToken
+   * twitter.oauth.accessTokenSecret
+   * twitter.info
+   *
+   * <p/>
+   * Launch using:
+   *
+   * <p/>
+   * mvn exec:java -Dexec.mainClass=org.apache.streams.twitter.provider.TwitterTimelineProvider -Dexec.args="application.conf tweets.json"
+   *
+   * @param args args
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
 
-    public TwitterTimelineProvider(TwitterUserInformationConfiguration config) {
-        this.config = config;
-    }
+    Preconditions.checkArgument(args.length >= 2);
 
-    public Queue<StreamsDatum> getProviderQueue() {
-        return this.providerQueue;
-    }
+    String configfile = args[0];
+    String outfile = args[1];
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
 
-    @Override
-    public void prepare(Object o) {
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
 
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    TwitterUserInformationConfiguration config = new ComponentConfigurator<>(TwitterUserInformationConfiguration.class).detectConfiguration(typesafe, "twitter");
+    TwitterTimelineProvider provider = new TwitterTimelineProvider(config);
 
+    ObjectMapper mapper = new StreamsJacksonMapper(Lists.newArrayList(TwitterDateTimeFormat.TWITTER_FORMAT));
 
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
+      while (iterator.hasNext()) {
+        StreamsDatum datum = iterator.next();
+        String json;
         try {
-            lock.writeLock().lock();
-            providerQueue = constructQueue();
-        } finally {
-            lock.writeLock().unlock();
+          json = mapper.writeValueAsString(datum.getDocument());
+          outStream.println(json);
+        } catch (JsonProcessingException ex) {
+          System.err.println(ex.getMessage());
         }
-
-        Preconditions.checkNotNull(providerQueue);
-        Preconditions.checkNotNull(config.getOauth().getConsumerKey());
-        Preconditions.checkNotNull(config.getOauth().getConsumerSecret());
-        Preconditions.checkNotNull(config.getOauth().getAccessToken());
-        Preconditions.checkNotNull(config.getOauth().getAccessTokenSecret());
-        Preconditions.checkNotNull(config.getInfo());
-
-        consolidateToIDs();
-
-        if(ids.size() > 1)
-            executor = MoreExecutors.listeningDecorator(TwitterUserInformationProvider.newFixedThreadPoolWithQueueSize(5, ids.size()));
-        else
-            executor = MoreExecutors.listeningDecorator(newSingleThreadExecutor());
+      }
+    }
+    while ( provider.isRunning() );
+    provider.cleanUp();
+    outStream.flush();
+  }
+
+  public TwitterTimelineProvider(TwitterUserInformationConfiguration config) {
+    this.config = config;
+  }
+
+  public Queue<StreamsDatum> getProviderQueue() {
+    return this.providerQueue;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+
+    try {
+      lock.writeLock().lock();
+      providerQueue = constructQueue();
+    } finally {
+      lock.writeLock().unlock();
     }
 
-    @Override
-    public void startStream() {
+    Preconditions.checkNotNull(providerQueue);
+    Preconditions.checkNotNull(config.getOauth().getConsumerKey());
+    Preconditions.checkNotNull(config.getOauth().getConsumerSecret());
+    Preconditions.checkNotNull(config.getOauth().getAccessToken());
+    Preconditions.checkNotNull(config.getOauth().getAccessTokenSecret());
+    Preconditions.checkNotNull(config.getInfo());
 
-        LOGGER.debug("{} startStream", STREAMS_ID);
+    consolidateToIDs();
 
-        Preconditions.checkArgument(!ids.isEmpty());
+    if (ids.size() > 1) {
+      executor = MoreExecutors.listeningDecorator(TwitterUserInformationProvider.newFixedThreadPoolWithQueueSize(5, ids.size()));
+    } else {
+      executor = MoreExecutors.listeningDecorator(newSingleThreadExecutor());
+    }
+  }
 
-        running.set(true);
+  @Override
+  public void startStream() {
 
-        submitTimelineThreads(ids.toArray(new Long[0]));
+    LOGGER.debug("{} startStream", STREAMS_ID);
 
-        executor.shutdown();
+    Preconditions.checkArgument(!ids.isEmpty());
 
-    }
+    running.set(true);
 
-    public boolean shouldContinuePulling(List<Status> statuses) {
-        return (statuses != null) && (statuses.size() > 0);
-    }
+    submitTimelineThreads(ids.toArray(new Long[0]));
 
-    protected void submitTimelineThreads(Long[] ids) {
+    executor.shutdown();
 
-        Twitter client = getTwitterClient();
+  }
 
-        for(int i = 0; i < ids.length; i++) {
+  public boolean shouldContinuePulling(List<Status> statuses) {
+    return (statuses != null) && (statuses.size() > 0);
+  }
 
-            TwitterTimelineProviderTask providerTask = new TwitterTimelineProviderTask(this, client, ids[i]);
-            ListenableFuture future = executor.submit(providerTask);
-            futures.add(future);
-            LOGGER.info("submitted {}", ids[i]);
-        }
+  protected void submitTimelineThreads(Long[] ids) {
 
-    }
+    Twitter client = getTwitterClient();
 
-    private Collection<Long> retrieveIds(String[] screenNames) {
-        Twitter client = getTwitterClient();
+    for (int i = 0; i < ids.length; i++) {
 
-        List<Long> ids = Lists.newArrayList();
-        try {
-            for (User tStat : client.lookupUsers(screenNames)) {
-                ids.add(tStat.getId());
-            }
-        } catch (TwitterException e) {
-            LOGGER.error("Failure retrieving user details.", e.getMessage());
-        }
-        return ids;
+      TwitterTimelineProviderTask providerTask = new TwitterTimelineProviderTask(this, client, ids[i]);
+      ListenableFuture future = executor.submit(providerTask);
+      futures.add(future);
+      LOGGER.info("submitted {}", ids[i]);
     }
 
-    public StreamsResultSet readCurrent() {
+  }
 
-        StreamsResultSet result;
+  private Collection<Long> retrieveIds(String[] screenNames) {
+    Twitter client = getTwitterClient();
 
-        LOGGER.debug("Providing {} docs", providerQueue.size());
+    List<Long> ids = Lists.newArrayList();
+    try {
+      for (User twitterUser : client.lookupUsers(screenNames)) {
+        ids.add(twitterUser.getId());
+      }
+    } catch (TwitterException ex) {
+      LOGGER.error("Failure retrieving user details.", ex.getMessage());
+    }
+    return ids;
+  }
 
-        try {
-            lock.writeLock().lock();
-            result = new StreamsResultSet(providerQueue);
-            result.setCounter(new DatumStatusCounter());
-            providerQueue = constructQueue();
-        } finally {
-            lock.writeLock().unlock();
-        }
+  @Override
+  public StreamsResultSet readCurrent() {
 
-        if( result.size() == 0 && providerQueue.isEmpty() && executor.isTerminated() ) {
-            LOGGER.info("Finished.  Cleaning up...");
+    StreamsResultSet result;
 
-            running.set(false);
+    LOGGER.debug("Providing {} docs", providerQueue.size());
 
-            LOGGER.info("Exiting");
-        }
+    try {
+      lock.writeLock().lock();
+      result = new StreamsResultSet(providerQueue);
+      result.setCounter(new DatumStatusCounter());
+      providerQueue = constructQueue();
+    } finally {
+      lock.writeLock().unlock();
+    }
 
-        return result;
+    if ( result.size() == 0 && providerQueue.isEmpty() && executor.isTerminated() ) {
+      LOGGER.info("Finished.  Cleaning up...");
 
-    }
+      running.set(false);
 
-    protected Queue<StreamsDatum> constructQueue() {
-        return new LinkedBlockingQueue<StreamsDatum>();
+      LOGGER.info("Exiting");
     }
 
-    public StreamsResultSet readNew(BigInteger sequence) {
-        LOGGER.debug("{} readNew", STREAMS_ID);
-        throw new NotImplementedException();
-    }
+    return result;
 
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        LOGGER.debug("{} readRange", STREAMS_ID);
-        throw new NotImplementedException();
-    }
+  }
 
+  protected Queue<StreamsDatum> constructQueue() {
+    return new LinkedBlockingQueue<StreamsDatum>();
+  }
 
+  public StreamsResultSet readNew(BigInteger sequence) {
+    LOGGER.debug("{} readNew", STREAMS_ID);
+    throw new NotImplementedException();
+  }
 
-    /**
-     * Using the "info" list that is contained in the configuration, ensure that all
-     * account identifiers are converted to IDs (Longs) instead of screenNames (Strings)
-     */
-    protected void consolidateToIDs() {
-        List<String> screenNames = Lists.newArrayList();
-        ids = Lists.newArrayList();
-
-        for(String account : config.getInfo()) {
-            try {
-                if (new Long(account) != null) {
-                    ids.add(Long.parseLong(Objects.toString(account, null)));
-                } else {
-                    screenNames.add(account);
-                }
-            } catch (Exception e) {
-                LOGGER.error("Exception while trying to add ID: {{}}, {}", account, e);
-            }
-        }
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    LOGGER.debug("{} readRange", STREAMS_ID);
+    throw new NotImplementedException();
+  }
 
-        // Twitter allows for batches up to 100 per request, but you cannot mix types
-        screenNameBatches = new ArrayList<String[]>();
-        while(screenNames.size() >= 100) {
-            screenNameBatches.add(screenNames.subList(0, 100).toArray(new String[0]));
-            screenNames = screenNames.subList(100, screenNames.size());
-        }
 
-        if(screenNames.size() > 0)
-            screenNameBatches.add(screenNames.toArray(new String[ids.size()]));
 
-        Iterator<String[]> screenNameBatchIterator = screenNameBatches.iterator();
+  /**
+   * Using the "info" list that is contained in the configuration, ensure that all
+   * account identifiers are converted to IDs (Longs) instead of screenNames (Strings).
+   */
+  protected void consolidateToIDs() {
+    List<String> screenNames = Lists.newArrayList();
+    ids = Lists.newArrayList();
 
-        while(screenNameBatchIterator.hasNext()) {
-            Collection<Long> batchIds = retrieveIds(screenNameBatchIterator.next());
-            ids.addAll(batchIds);
+    for (String account : config.getInfo()) {
+      try {
+        if (new Long(account) != null) {
+          ids.add(Long.parseLong(Objects.toString(account, null)));
+        } else {
+          screenNames.add(account);
         }
+      } catch (Exception ex) {
+        LOGGER.error("Exception while trying to add ID: {{}}, {}", account, ex);
+      }
     }
 
-    public Twitter getTwitterClient() {
-
-        String baseUrl = TwitterProviderUtil.baseUrl(config);
-
-        ConfigurationBuilder builder = new ConfigurationBuilder()
-                .setOAuthConsumerKey(config.getOauth().getConsumerKey())
-                .setOAuthConsumerSecret(config.getOauth().getConsumerSecret())
-                .setOAuthAccessToken(config.getOauth().getAccessToken())
-                .setOAuthAccessTokenSecret(config.getOauth().getAccessTokenSecret())
-                .setIncludeEntitiesEnabled(true)
-                .setJSONStoreEnabled(true)
-                .setAsyncNumThreads(3)
-                .setRestBaseURL(baseUrl)
-                .setIncludeMyRetweetEnabled(Boolean.TRUE)
-                .setPrettyDebugEnabled(Boolean.TRUE);
-
-        return new TwitterFactory(builder.build()).getInstance();
+    // Twitter allows for batches up to 100 per request, but you cannot mix types
+    screenNameBatches = new ArrayList<String[]>();
+    while (screenNames.size() >= 100) {
+      screenNameBatches.add(screenNames.subList(0, 100).toArray(new String[0]));
+      screenNames = screenNames.subList(100, screenNames.size());
     }
 
-    @Override
-    public void cleanUp() {
-        shutdownAndAwaitTermination(executor);
+    if (screenNames.size() > 0) {
+      screenNameBatches.add(screenNames.toArray(new String[ids.size()]));
     }
 
-    void shutdownAndAwaitTermination(ExecutorService pool) {
-        pool.shutdown(); // Disable new tasks from being submitted
-        try {
-            // Wait a while for existing tasks to terminate
-            if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
-                pool.shutdownNow(); // Cancel currently executing tasks
-                // Wait a while for tasks to respond to being cancelled
-                if (!pool.awaitTermination(10, TimeUnit.SECONDS))
-                    System.err.println("Pool did not terminate");
-            }
-        } catch (InterruptedException ie) {
-            // (Re-)Cancel if current thread also interrupted
-            pool.shutdownNow();
-            // Preserve interrupt status
-            Thread.currentThread().interrupt();
-        }
-    }
+    Iterator<String[]> screenNameBatchIterator = screenNameBatches.iterator();
 
-    @Override
-    public boolean isRunning() {
-        if (providerQueue.isEmpty() && executor.isTerminated() && Futures.allAsList(futures).isDone()) {
-            LOGGER.info("Completed");
-            running.set(false);
-            LOGGER.info("Exiting");
+    while (screenNameBatchIterator.hasNext()) {
+      Collection<Long> batchIds = retrieveIds(screenNameBatchIterator.next());
+      ids.addAll(batchIds);
+    }
+  }
+
+  /**
+   * get Twitter Client from TwitterUserInformationConfiguration.
+   * @return result
+   */
+  public Twitter getTwitterClient() {
+
+    String baseUrl = TwitterProviderUtil.baseUrl(config);
+
+    ConfigurationBuilder builder = new ConfigurationBuilder()
+        .setOAuthConsumerKey(config.getOauth().getConsumerKey())
+        .setOAuthConsumerSecret(config.getOauth().getConsumerSecret())
+        .setOAuthAccessToken(config.getOauth().getAccessToken())
+        .setOAuthAccessTokenSecret(config.getOauth().getAccessTokenSecret())
+        .setIncludeEntitiesEnabled(true)
+        .setJSONStoreEnabled(true)
+        .setAsyncNumThreads(3)
+        .setRestBaseURL(baseUrl)
+        .setIncludeMyRetweetEnabled(Boolean.TRUE)
+        .setPrettyDebugEnabled(Boolean.TRUE);
+
+    return new TwitterFactory(builder.build()).getInstance();
+  }
+
+  @Override
+  public void cleanUp() {
+    shutdownAndAwaitTermination(executor);
+  }
+
+  void shutdownAndAwaitTermination(ExecutorService pool) {
+    pool.shutdown(); // Disable new tasks from being submitted
+    try {
+      // Wait a while for existing tasks to terminate
+      if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+        pool.shutdownNow(); // Cancel currently executing tasks
+        // Wait a while for tasks to respond to being cancelled
+        if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+          System.err.println("Pool did not terminate");
         }
-        return running.get();
+      }
+    } catch (InterruptedException ie) {
+      // (Re-)Cancel if current thread also interrupted
+      pool.shutdownNow();
+      // Preserve interrupt status
+      Thread.currentThread().interrupt();
+    }
+  }
+
+  @Override
+  public boolean isRunning() {
+    if (providerQueue.isEmpty() && executor.isTerminated() && Futures.allAsList(futures).isDone()) {
+      LOGGER.info("Completed");
+      running.set(false);
+      LOGGER.info("Exiting");
     }
+    return running.get();
+  }
 }


[22/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosHeartbeatStream.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosHeartbeatStream.java b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosHeartbeatStream.java
index 64cc0e8..71447cb 100644
--- a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosHeartbeatStream.java
+++ b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosHeartbeatStream.java
@@ -19,8 +19,9 @@
 
 package org.apache.streams.sysomos.provider;
 
-import com.sysomos.xml.BeatApi;
 import org.apache.streams.core.StreamsDatum;
+
+import com.sysomos.xml.BeatApi;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -30,208 +31,228 @@ import org.slf4j.LoggerFactory;
  */
 public class SysomosHeartbeatStream implements Runnable {
 
-    private static enum OperatingMode { DATE, DOC_MATCH}
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(SysomosHeartbeatStream.class);
-
-    private final SysomosProvider provider;
-    private final SysomosClient client;
-    private final String heartbeatId;
-    private final long maxApiBatch;
-    private final long minLatency;
-    private final OperatingMode mode;
-
-    private String lastID;
-    private DateTime beforeTime;
-    private DateTime afterTime;
-    private DateTime lastRunTime;
-    private int offsetCount = 0;
-    private boolean enabled = true;
-
-    public SysomosHeartbeatStream(SysomosProvider provider, String heartbeatId) {
-        this(provider, heartbeatId, null, DateTime.now());
+  private enum OperatingMode { DATE, DOC_MATCH }
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(SysomosHeartbeatStream.class);
+
+  private final SysomosProvider provider;
+  private final SysomosClient client;
+  private final String heartbeatId;
+  private final long maxApiBatch;
+  private final long minLatency;
+  private final OperatingMode mode;
+
+  private String lastId;
+  private DateTime beforeTime;
+  private DateTime afterTime;
+  private DateTime lastRunTime;
+  private int offsetCount = 0;
+  private boolean enabled = true;
+
+  public SysomosHeartbeatStream(SysomosProvider provider, String heartbeatId) {
+    this(provider, heartbeatId, null, DateTime.now());
+  }
+
+  /**
+   * SysomosHeartbeatStream constructor.
+   * @param provider SysomosProvider
+   * @param heartbeatId heartbeatId
+   * @param beforeTime DateTime
+   * @param afterTime DateTime
+   */
+  public SysomosHeartbeatStream(SysomosProvider provider, String heartbeatId, DateTime beforeTime, DateTime afterTime) {
+    this(provider, heartbeatId, OperatingMode.DATE);
+    this.beforeTime = beforeTime;
+    this.afterTime = afterTime;
+  }
+
+  /**
+   * SysomosHeartbeatStream constructor.
+   * @param provider SysomosProvider
+   * @param heartbeatId heartbeatId
+   * @param documentId last documentId
+   */
+  public SysomosHeartbeatStream(SysomosProvider provider, String heartbeatId, String documentId) {
+    this(provider, heartbeatId, OperatingMode.DOC_MATCH);
+    this.lastId = documentId;
+  }
+
+  /**
+   * SysomosHeartbeatStream constructor.
+   * @param provider SysomosProvider
+   * @param heartbeatId heartbeatId
+   * @param mode OperatingMode
+   */
+  public SysomosHeartbeatStream(SysomosProvider provider, String heartbeatId, OperatingMode mode) {
+    this.provider = provider;
+    this.heartbeatId = heartbeatId;
+
+    this.client = provider.getClient();
+    this.maxApiBatch = provider.getMaxApiBatch();
+    this.minLatency = provider.getMinLatency();
+    this.mode = mode;
+  }
+
+  @Override
+  public void run() {
+    try {
+      executeRun();
+    } catch (Exception ex) {
+      LOGGER.error("Error executing heartbeat stream", ex);
+      shutdown();
     }
-
-    public SysomosHeartbeatStream(SysomosProvider provider, String heartbeatId, DateTime beforeTime, DateTime afterTime) {
-        this(provider, heartbeatId, OperatingMode.DATE);
-        this.beforeTime = beforeTime;
-        this.afterTime = afterTime;
+  }
+
+  protected void executeRun() {
+    QueryResult result;
+    String mostCurrentId = null;
+    int totalDocCount = 0;
+    lastRunTime = DateTime.now();
+    //Iff we are trying to get to a specific document ID, continue to query after minimum delay
+    do {
+      LOGGER.debug("Querying API to match last ID of {} or time range of {} - {}", lastId, afterTime, beforeTime);
+      result = queryApi();
+      totalDocCount += result.getResponseSize();
+      //Ensure that we are only assigning lastId to the latest ID, even if there is backfill query.
+      //Since offset is calculated at the end of the run, if we detect the need to backfill, it will increment to 1
+      if (offsetCount == 1) {
+        mostCurrentId = result.getCurrentId();
+      }
+      updateOffset(result);
     }
-
-    public SysomosHeartbeatStream(SysomosProvider provider, String heartbeatId, String documentId) {
-        this(provider, heartbeatId, OperatingMode.DOC_MATCH);
-        this.lastID = documentId;
+    while (offsetCount > 0);
+
+    updateState(result, mostCurrentId, totalDocCount);
+    LOGGER.debug("Completed current execution with a final docID of {} or time of {}", lastId, afterTime);
+  }
+
+  protected void updateState(QueryResult result, String mostCurrentId, int totalDocCount) {
+    if (OperatingMode.DOC_MATCH.equals(mode)) {
+      //Set the last ID so that the next time we are executed we will continue to query only so long as we haven't
+      //found the specific ID
+      lastId = mostCurrentId == null ? result.getCurrentId() : mostCurrentId;
+    } else {
+      //If we didn't see any docs, there might be a lag on the Sysomos side.  Retry.
+      afterTime = totalDocCount == 0 ? afterTime : lastRunTime;
     }
 
-    public SysomosHeartbeatStream(SysomosProvider provider, String heartbeatId, OperatingMode mode) {
-        this.provider = provider;
-        this.heartbeatId = heartbeatId;
-
-        this.client = provider.getClient();
-        this.maxApiBatch = provider.getMaxApiBatch();
-        this.minLatency = provider.getMinLatency();
-        this.mode = mode;
+    if (SysomosProvider.Mode.BACKFILL_AND_TERMINATE.equals(provider.getMode())) {
+      shutdown();
+      LOGGER.info("Completed backfill to {} for heartbeat {}", OperatingMode.DOC_MATCH.equals(mode) ? lastId : afterTime, heartbeatId);
     }
-
-    @Override
-    public void run() {
-        try {
-            executeRun();
-        } catch (Exception e) {
-            LOGGER.error("Error executing heartbeat stream", e);
-            shutdown();
-        }
+  }
+
+  protected void updateOffset(QueryResult result) {
+    if (OperatingMode.DOC_MATCH.equals(mode)) {
+      //Reset the offset iff we have found a match or this is the first execution
+      offsetCount = lastId == null || result.isMatchedLastId() ? 0 : offsetCount + 1;
+    } else {
+      offsetCount = result.getResponseSize() == 0 ? 0 : offsetCount + 1;
     }
-
-    protected void executeRun() {
-        QueryResult result;
-        String mostCurrentId = null;
-        int totalDocCount = 0;
-        lastRunTime = DateTime.now();
-        //Iff we are trying to get to a specific document ID, continue to query after minimum delay
-        do {
-            LOGGER.debug("Querying API to match last ID of {} or time range of {} - {}", lastID, afterTime, beforeTime);
-            result = queryAPI();
-            totalDocCount += result.getResponseSize();
-            //Ensure that we are only assigning lastID to the latest ID, even if there is backfill query.
-            //Since offset is calcuated at the end of the run, if we detect the need to backfill, it will increment to 1
-            if(offsetCount == 1) {
-                mostCurrentId = result.getCurrentId();
-            }
-            updateOffset(result);
-        } while (offsetCount > 0);
-
-        updateState(result, mostCurrentId, totalDocCount);
-        LOGGER.debug("Completed current execution with a final docID of {} or time of {}", lastID, afterTime);
-    }
-
-    protected void updateState(QueryResult result, String mostCurrentId, int totalDocCount) {
-        if(OperatingMode.DOC_MATCH.equals(mode)) {
-            //Set the last ID so that the next time we are executed we will continue to query only so long as we haven't
-            //found the specific ID
-            lastID = mostCurrentId == null ? result.getCurrentId() : mostCurrentId;
-        } else {
-            //If we didn't see any docs, there might be a lag on the Sysomos side.  Retry.
-            afterTime = totalDocCount == 0 ? afterTime : lastRunTime;
-        }
-
-        if(SysomosProvider.Mode.BACKFILL_AND_TERMINATE.equals(provider.getMode())) {
-            shutdown();
-            LOGGER.info("Completed backfill to {} for heartbeat {}", OperatingMode.DOC_MATCH.equals(mode) ? lastID : afterTime, heartbeatId);
-        }
+    if (offsetCount > 0) {
+      sleep();
     }
-
-    protected void updateOffset(QueryResult result) {
-        if(OperatingMode.DOC_MATCH.equals(mode)) {
-            //Reset the offset iff we have found a match or this is the first execution
-            offsetCount = lastID == null || result.isMatchedLastId() ? 0 : offsetCount + 1;
-        } else {
-            offsetCount = result.getResponseSize() == 0 ? 0 : offsetCount + 1;
-        }
-        if(offsetCount > 0) {
-            sleep();
-        }
+  }
+
+  protected void sleep() {
+    try {
+      Thread.sleep(this.minLatency);
+    } catch (InterruptedException ex) {
+      LOGGER.warn("Thread interrupted while sleeping minimum delay", ex);
+      shutdown();
     }
-
-    protected void sleep() {
-        try {
-            Thread.sleep(this.minLatency);
-        } catch (InterruptedException e) {
-            LOGGER.warn("Thread interrupted while sleeping minimum delay", e);
-            shutdown();
+  }
+
+  protected QueryResult queryApi() {
+    BeatApi.BeatResponse response = executeApiRequest();
+
+    String currentId = null;
+    boolean matched = false;
+    int responseSize = 0;
+    if (response != null) {
+      for (BeatApi.BeatResponse.Beat beat : response.getBeat()) {
+        String docId = beat.getDocid();
+        //We get documents in descending time order.  This will set the id to the latest document
+        if (currentId == null) {
+          currentId = docId;
+        }
+        //We only want to process documents that we know we have not seen before
+        if (lastId != null && lastId.equals(docId)) {
+          matched = true;
+          break;
         }
+        StreamsDatum item = new StreamsDatum(beat, docId);
+        item.getMetadata().put("heartbeat", this.heartbeatId);
+        this.provider.enqueueItem(item);
+      }
+      responseSize = response.getCount();
     }
-
-    protected QueryResult queryAPI() {
-        BeatApi.BeatResponse response = executeAPIRequest();
-
-        String currentId = null;
-        boolean matched = false;
-        int responseSize = 0;
-        if(response != null) {
-            for (BeatApi.BeatResponse.Beat beat : response.getBeat()) {
-                String docId = beat.getDocid();
-                //We get documents in descending time order.  This will set the id to the latest document
-                if (currentId == null) {
-                    currentId = docId;
-                }
-                //We only want to process documents that we know we have not seen before
-                if (lastID != null && lastID.equals(docId)) {
-                    matched = true;
-                    break;
-                }
-                StreamsDatum item = new StreamsDatum(beat, docId);
-                item.getMetadata().put("heartbeat", this.heartbeatId);
-                this.provider.enqueueItem(item);
-            }
-            responseSize = response.getCount();
+    return new QueryResult(matched, currentId, responseSize);
+  }
+
+  protected BeatApi.BeatResponse executeApiRequest() {
+    BeatApi.BeatResponse response = null;
+    try {
+      if (enabled) {
+        RequestBuilder requestBuilder = this.client.createRequestBuilder()
+            .setHeartBeatId(heartbeatId)
+            .setOffset(offsetCount * maxApiBatch)
+            .setReturnSetSize(maxApiBatch);
+        if (beforeTime != null) {
+          requestBuilder.setAddedBeforeDate(beforeTime);
         }
-        return new QueryResult(matched, currentId, responseSize);
-    }
-
-    protected BeatApi.BeatResponse executeAPIRequest() {
-        BeatApi.BeatResponse response = null;
-        try {
-            if(enabled) {
-                RequestBuilder requestBuilder = this.client.createRequestBuilder()
-                        .setHeartBeatId(heartbeatId)
-                        .setOffset(offsetCount * maxApiBatch)
-                        .setReturnSetSize(maxApiBatch);
-                if(beforeTime != null) {
-                    requestBuilder.setAddedBeforeDate(beforeTime);
-                }
-                if(afterTime != null) {
-                    requestBuilder.setAddedAfterDate(afterTime);
-                }
-                response = requestBuilder.execute();
-
-                LOGGER.debug("Received {} results from API query", response.getCount());
-            }
-        } catch (Exception e) {
-            LOGGER.warn("Error querying Sysomos API", e);
+        if (afterTime != null) {
+          requestBuilder.setAddedAfterDate(afterTime);
         }
-        return response;
-    }
+        response = requestBuilder.execute();
 
-    protected void shutdown() {
-        provider.signalComplete(heartbeatId);
-        enabled = false;
+        LOGGER.debug("Received {} results from API query", response.getCount());
+      }
+    } catch (Exception ex) {
+      LOGGER.warn("Error querying Sysomos API", ex);
     }
+    return response;
+  }
 
-    protected class QueryResult {
-        private boolean matchedLastId;
-        private String currentId;
-        private int responseSize;
+  protected void shutdown() {
+    provider.signalComplete(heartbeatId);
+    enabled = false;
+  }
 
+  protected class QueryResult {
+    private boolean matchedLastId;
+    private String currentId;
+    private int responseSize;
 
-        public QueryResult(boolean matchedLastId, String currentId, int responseSize) {
-            this.matchedLastId = matchedLastId;
-            this.currentId = currentId;
-            this.responseSize = responseSize;
-        }
 
-        public boolean isMatchedLastId() {
-            return matchedLastId;
-        }
+    public QueryResult(boolean matchedLastId, String currentId, int responseSize) {
+      this.matchedLastId = matchedLastId;
+      this.currentId = currentId;
+      this.responseSize = responseSize;
+    }
 
-        public void setMatchedLastId(boolean matchedLastId) {
-            this.matchedLastId = matchedLastId;
-        }
+    public boolean isMatchedLastId() {
+      return matchedLastId;
+    }
 
-        public String getCurrentId() {
-            return currentId;
-        }
+    public void setMatchedLastId(boolean matchedLastId) {
+      this.matchedLastId = matchedLastId;
+    }
 
-        public void setCurrentId(String currentId) {
-            this.currentId = currentId;
-        }
+    public String getCurrentId() {
+      return currentId;
+    }
 
-        public int getResponseSize() {
-            return responseSize;
-        }
+    public void setCurrentId(String currentId) {
+      this.currentId = currentId;
+    }
 
-        public void setResponseSize(int responseSize) {
-            this.responseSize = responseSize;
-        }
+    public int getResponseSize() {
+      return responseSize;
+    }
+
+    public void setResponseSize(int responseSize) {
+      this.responseSize = responseSize;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosProvider.java b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosProvider.java
index 824ede2..ec1f317 100644
--- a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosProvider.java
+++ b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/provider/SysomosProvider.java
@@ -19,6 +19,15 @@
 
 package org.apache.streams.sysomos.provider;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfiguration;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProvider;
+import org.apache.streams.core.StreamsResultSet;
+import org.apache.streams.data.util.RFC3339Utils;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Preconditions;
@@ -31,14 +40,6 @@ import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 import com.typesafe.config.ConfigParseOptions;
 import org.apache.commons.lang.NotImplementedException;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProvider;
-import org.apache.streams.core.StreamsResultSet;
-import org.apache.streams.data.util.RFC3339Utils;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -52,7 +53,9 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.Queue;
 import java.util.Set;
-import java.util.concurrent.*;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
@@ -60,310 +63,336 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 /**
  * Streams Provider for the Sysomos Heartbeat API
  *
+ * <p/>
  * Configuration:
- * The provider takes either a Map<String,Object> containing the mode (backfill and terminate OR continuous) and a
- * Map<String,String> of heartbeat IDs to document target ids or a string of the format ${heartbeatId}:${documentId},...,${heartbeatId}:${documentId}
+ * The provider takes either a Map[String,Object] containing the mode (backfill and terminate OR continuous) and a
+ * Map[String,String] of heartbeat IDs to document target ids or a string of the format
+ *   ${heartbeatId}:${documentId},...,${heartbeatId}:${documentId}
  * This configuration will configure the provider to backfill to the specified document and either terminate or not
  * depending on the mode flag.  Continuous mode is assumed, and is the ony mode supported by the String configuration.
  *
- *  To use from command line:
- *
- *  Supply configuration similar to src/test/resources/rss.conf
- *
- *  Launch using:
- *
- *  mvn exec:java -Dexec.mainClass=org.apache.streams.rss.provider.RssStreamProvider -Dexec.args="rss.conf articles.json"
  */
 public class SysomosProvider implements StreamsProvider {
 
-    public static final String STREAMS_ID = "SysomosProvider";
-
-    public static enum Mode { CONTINUOUS, BACKFILL_AND_TERMINATE }
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(SysomosProvider.class);
-
-    public static final String ENDING_TIME_KEY = "addedBefore";
-    public static final String STARTING_TIME_KEY = "addedAfter";
-    public static final String MODE_KEY = "mode";
-    public static final String STARTING_DOCS_KEY = "startingDocs";
-    public static final int LATENCY = 10000;  //Default minLatency for querying the Sysomos API in milliseconds
-    public static final long PROVIDER_BATCH_SIZE = 10000L; //Default maximum size of the queue
-    public static final long API_BATCH_SIZE = 1000L; //Default maximum size of an API request
-
-    protected volatile Queue<StreamsDatum> providerQueue;
-
-    private final ReadWriteLock lock = new ReentrantReadWriteLock();
-    private final Set<String> completedHeartbeats = Sets.newHashSet();
-    private final long maxQueued;
-    private final long minLatency;
-    private final long scheduledLatency;
-    private final long maxApiBatch;
-
-    private SysomosClient client;
-    private SysomosConfiguration config;
-    private ScheduledExecutorService stream;
-    private Map<String, String> documentIds;
-    private Map<String, String> addedBefore;
-    private Map<String, String> addedAfter;
-    private Mode mode = Mode.CONTINUOUS;
-    private boolean started = false;
-    private AtomicInteger count;
-
-    public SysomosProvider(SysomosConfiguration sysomosConfiguration) {
-        this.config = sysomosConfiguration;
-        this.client = new SysomosClient(sysomosConfiguration.getApiKey());
-        this.maxQueued = sysomosConfiguration.getMaxBatchSize() == null ? PROVIDER_BATCH_SIZE : sysomosConfiguration.getMaxBatchSize();
-        this.minLatency = sysomosConfiguration.getMinDelayMs() == null ? LATENCY : sysomosConfiguration.getMinDelayMs();
-        this.scheduledLatency = sysomosConfiguration.getScheduledDelayMs() == null ? (LATENCY * 15) : sysomosConfiguration.getScheduledDelayMs();
-        this.maxApiBatch = sysomosConfiguration.getMinDelayMs() == null ? API_BATCH_SIZE : sysomosConfiguration.getApiBatchSize();
-        this.count = new AtomicInteger();
-    }
-
-    public SysomosConfiguration getConfig() {
-        return config;
-    }
-
-    public void setConfig(SysomosConfiguration config) {
-        this.config = config;
-    }
-
-    public Mode getMode() {
-        return mode;
-    }
-
-    public long getMinLatency() {
-        return minLatency;
-    }
-
-    public long getMaxApiBatch() {
-        return maxApiBatch;
+  public static final String STREAMS_ID = "SysomosProvider";
+
+  public static enum Mode { CONTINUOUS, BACKFILL_AND_TERMINATE }
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(SysomosProvider.class);
+
+  public static final String ENDING_TIME_KEY = "addedBefore";
+  public static final String STARTING_TIME_KEY = "addedAfter";
+  public static final String MODE_KEY = "mode";
+  public static final String STARTING_DOCS_KEY = "startingDocs";
+  public static final int LATENCY = 10000;  //Default minLatency for querying the Sysomos API in milliseconds
+  public static final long PROVIDER_BATCH_SIZE = 10000L; //Default maximum size of the queue
+  public static final long API_BATCH_SIZE = 1000L; //Default maximum size of an API request
+
+  protected volatile Queue<StreamsDatum> providerQueue;
+
+  private final ReadWriteLock lock = new ReentrantReadWriteLock();
+  private final Set<String> completedHeartbeats = Sets.newHashSet();
+  private final long maxQueued;
+  private final long minLatency;
+  private final long scheduledLatency;
+  private final long maxApiBatch;
+
+  private SysomosClient client;
+  private SysomosConfiguration config;
+  private ScheduledExecutorService stream;
+  private Map<String, String> documentIds;
+  private Map<String, String> addedBefore;
+  private Map<String, String> addedAfter;
+  private Mode mode = Mode.CONTINUOUS;
+  private boolean started = false;
+  private AtomicInteger count;
+
+  /**
+   * SysomosProvider constructor.
+   * @param sysomosConfiguration SysomosConfiguration
+   */
+  public SysomosProvider(SysomosConfiguration sysomosConfiguration) {
+    this.config = sysomosConfiguration;
+    this.client = new SysomosClient(sysomosConfiguration.getApiKey());
+    this.maxQueued = sysomosConfiguration.getMaxBatchSize() == null ? PROVIDER_BATCH_SIZE : sysomosConfiguration.getMaxBatchSize();
+    this.minLatency = sysomosConfiguration.getMinDelayMs() == null ? LATENCY : sysomosConfiguration.getMinDelayMs();
+    this.scheduledLatency = sysomosConfiguration.getScheduledDelayMs() == null
+        ? (LATENCY * 15) : sysomosConfiguration.getScheduledDelayMs();
+    this.maxApiBatch = sysomosConfiguration.getMinDelayMs() == null ? API_BATCH_SIZE : sysomosConfiguration.getApiBatchSize();
+    this.count = new AtomicInteger();
+  }
+
+  public SysomosConfiguration getConfig() {
+    return config;
+  }
+
+  public void setConfig(SysomosConfiguration config) {
+    this.config = config;
+  }
+
+  public Mode getMode() {
+    return mode;
+  }
+
+  public long getMinLatency() {
+    return minLatency;
+  }
+
+  public long getMaxApiBatch() {
+    return maxApiBatch;
+  }
+
+  public SysomosClient getClient() {
+    return client;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void startStream() {
+    LOGGER.trace("Starting Producer");
+    if (!started) {
+      LOGGER.trace("Producer not started.  Initializing");
+      stream = Executors.newScheduledThreadPool(getConfig().getHeartbeatIds().size() + 1);
+      for (String heartbeatId : getConfig().getHeartbeatIds()) {
+        Runnable task = createStream(heartbeatId);
+        stream.scheduleWithFixedDelay(task, 0, this.scheduledLatency, TimeUnit.MILLISECONDS);
+        LOGGER.info("Started producer task for heartbeat {}", heartbeatId);
+      }
+      started = true;
     }
-
-    public SysomosClient getClient() {
-        return client;
+  }
+
+  @Override
+  public StreamsResultSet readCurrent() {
+    StreamsResultSet current;
+    try {
+      lock.writeLock().lock();
+      LOGGER.debug("Creating new result set for {} items", providerQueue.size());
+      count.addAndGet(providerQueue.size());
+      current = new StreamsResultSet(providerQueue);
+      providerQueue = constructQueue();
+    } finally {
+      lock.writeLock().unlock();
     }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
+    return current;
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger bigInteger) {
+    throw new NotImplementedException("readNew not currently implemented");
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime dateTime, DateTime dateTime2) {
+    throw new NotImplementedException("readRange not currently implemented");
+  }
+
+  //If the provider queue still has data, we are still running.  If not, we are running if we have not been signaled
+  //by all completed heartbeats so long as the thread pool is alive
+  @Override
+  public boolean isRunning() {
+    return providerQueue.size() > 0
+        || (completedHeartbeats.size() < this.getConfig().getHeartbeatIds().size()
+            && !(stream.isTerminated()
+        || stream.isShutdown()));
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    this.providerQueue = constructQueue();
+    if (configurationObject instanceof Map) {
+      extractConfigFromMap((Map) configurationObject);
+    } else if (configurationObject instanceof String) {
+      documentIds = Splitter.on(";").trimResults().withKeyValueSeparator("=").split((String)configurationObject);
     }
-
-    @Override
-    public void startStream() {
-        LOGGER.trace("Starting Producer");
-        if (!started) {
-            LOGGER.trace("Producer not started.  Initializing");
-            stream = Executors.newScheduledThreadPool(getConfig().getHeartbeatIds().size() + 1);
-            for (String heartbeatId : getConfig().getHeartbeatIds()) {
-                Runnable task = createStream(heartbeatId);
-                stream.scheduleWithFixedDelay(task, 0, this.scheduledLatency, TimeUnit.MILLISECONDS);
-                LOGGER.info("Started producer task for heartbeat {}", heartbeatId);
-            }
-            started = true;
+  }
+
+  @Override
+  public void cleanUp() {
+    stream.shutdown(); // Disable new tasks from being submitted
+    try {
+      // Wait a while for existing tasks to terminate
+      if (!stream.awaitTermination(60, TimeUnit.SECONDS)) {
+        stream.shutdownNow(); // Cancel currently executing tasks
+        // Wait a while for tasks to respond to being cancelled
+        if (!stream.awaitTermination(60, TimeUnit.SECONDS)) {
+          LOGGER.error("Stream did not terminate");
         }
+      }
+    } catch (InterruptedException ie) {
+      // (Re-)Cancel if current thread also interrupted
+      stream.shutdownNow();
+      // Preserve interrupt status
+      Thread.currentThread().interrupt();
     }
-
-    @Override
-    public StreamsResultSet readCurrent() {
-        StreamsResultSet current;
-        try {
-            lock.writeLock().lock();
-            LOGGER.debug("Creating new result set for {} items", providerQueue.size());
-            count.addAndGet(providerQueue.size());
-            current = new StreamsResultSet(providerQueue);
-            providerQueue = constructQueue();
-        } finally {
-            lock.writeLock().unlock();
-        }
-
-        return current;
+  }
+
+  /**
+   * signalComplete.
+   * @param heartbeatId heartbeatId
+   */
+  public void signalComplete(String heartbeatId) {
+    try {
+      this.lock.writeLock().lock();
+      this.completedHeartbeats.add(heartbeatId);
+      if (!this.isRunning()) {
+        this.cleanUp();
+      }
+    } finally {
+      this.lock.writeLock().unlock();
     }
 
-    @Override
-    public StreamsResultSet readNew(BigInteger bigInteger) {
-        throw new NotImplementedException("readNew not currently implemented");
+  }
+
+  protected void enqueueItem(StreamsDatum datum) {
+    boolean success;
+    do {
+      try {
+        pauseForSpace(); //Don't lock before this pause. We don't want to block the readCurrent method
+        lock.readLock().lock();
+        success = providerQueue.offer(datum);
+        Thread.yield();
+      } finally {
+        lock.readLock().unlock();
+      }
     }
+    while (!success);
+  }
 
-    @Override
-    public StreamsResultSet readRange(DateTime dateTime, DateTime dateTime2) {
-        throw new NotImplementedException("readRange not currently implemented");
-    }
+  protected SysomosHeartbeatStream createStream(String heartbeatId) {
+    String afterTime = addedAfter != null && addedAfter.containsKey(heartbeatId) ? addedAfter.get(heartbeatId) : null;
+    String beforeTime = addedBefore != null && addedBefore.containsKey(heartbeatId) ? addedBefore.get(heartbeatId) : null;
 
-    //If the provider queue still has data, we are still running.  If not, we are running if we have not been signaled
-    //by all completed heartbeats so long as the thread pool is alive
-    @Override
-    public boolean isRunning() {
-        return providerQueue.size() > 0 || (completedHeartbeats.size() < this.getConfig().getHeartbeatIds().size() && !(stream.isTerminated() || stream.isShutdown()));
+    if (documentIds != null && documentIds.containsKey(heartbeatId)) {
+      return new SysomosHeartbeatStream(this, heartbeatId, documentIds.get(heartbeatId));
     }
-
-    @Override
-    public void prepare(Object configurationObject) {
-        this.providerQueue = constructQueue();
-        if(configurationObject instanceof Map) {
-            extractConfigFromMap((Map) configurationObject);
-        } else if(configurationObject instanceof String) {
-            documentIds = Splitter.on(";").trimResults().withKeyValueSeparator("=").split((String)configurationObject);
-        }
+    if (afterTime != null) {
+      if (beforeTime != null) {
+        return new SysomosHeartbeatStream(this, heartbeatId, RFC3339Utils.parseToUTC(beforeTime), RFC3339Utils.parseToUTC(afterTime));
+      } else {
+        return new SysomosHeartbeatStream(this, heartbeatId, null, RFC3339Utils.parseToUTC(afterTime));
+      }
     }
-
-    @Override
-    public void cleanUp() {
-        stream.shutdown(); // Disable new tasks from being submitted
-        try {
-            // Wait a while for existing tasks to terminate
-            if (!stream.awaitTermination(60, TimeUnit.SECONDS)) {
-                stream.shutdownNow(); // Cancel currently executing tasks
-                // Wait a while for tasks to respond to being cancelled
-                if (!stream.awaitTermination(60, TimeUnit.SECONDS)) {
-                    LOGGER.error("Stream did not terminate");
-                }
-            }
-        } catch (InterruptedException ie) {
-            // (Re-)Cancel if current thread also interrupted
-            stream.shutdownNow();
-            // Preserve interrupt status
-            Thread.currentThread().interrupt();
-        }
+    return new SysomosHeartbeatStream(this, heartbeatId);
+  }
+
+  /**
+   * Wait for the queue size to be below threshold before allowing execution to continue on this thread.
+   */
+  protected void pauseForSpace() {
+    while (this.providerQueue.size() >= maxQueued) {
+      LOGGER.trace("Sleeping the current thread due to a full queue");
+      try {
+        Thread.sleep(100);
+        LOGGER.trace("Resuming thread after wait period");
+      } catch (InterruptedException ex) {
+        LOGGER.warn("Thread was interrupted", ex);
+      }
     }
-
-    public void signalComplete(String heartbeatId) {
-        try {
-            this.lock.writeLock().lock();
-            this.completedHeartbeats.add(heartbeatId);
-            if(!this.isRunning()) {
-                this.cleanUp();
-            }
-        } finally {
-            this.lock.writeLock().unlock();
-        }
-
+  }
+
+  @SuppressWarnings("unchecked")
+  protected void extractConfigFromMap(Map configMap) {
+    if (configMap.containsKey(MODE_KEY)) {
+      Object configMode = configMap.get(MODE_KEY);
+      if (!(configMode instanceof Mode)) {
+        throw new IllegalStateException("Invalid configuration.  Mode must be an instance of the Mode enum but was " + configMode);
+      }
+      this.mode = (Mode)configMode;
     }
-
-    protected void enqueueItem(StreamsDatum datum) {
-        boolean success;
-        do {
-            try {
-                pauseForSpace(); //Dont lock before this pause. We don't want to block the readCurrent method
-                lock.readLock().lock();
-                success = providerQueue.offer(datum);
-                Thread.yield();
-            }finally {
-                lock.readLock().unlock();
-            }
-        }
-        while (!success);
+    if (configMap.containsKey(STARTING_DOCS_KEY)) {
+      Object configIds = configMap.get(STARTING_DOCS_KEY);
+      if (!(configIds instanceof Map)) {
+        throw new IllegalStateException("Invalid configuration.  StartingDocs must be an instance of Map<String,String> but was "
+            + configIds);
+      }
+      this.documentIds = (Map)configIds;
     }
-
-    protected SysomosHeartbeatStream createStream(String heartbeatId) {
-        String afterTime = addedAfter != null && addedAfter.containsKey(heartbeatId) ? addedAfter.get(heartbeatId) : null;
-        String beforeTime = addedBefore != null && addedBefore.containsKey(heartbeatId) ? addedBefore.get(heartbeatId) : null;
-
-        if(documentIds != null && documentIds.containsKey(heartbeatId)) {
-            return new SysomosHeartbeatStream(this, heartbeatId, documentIds.get(heartbeatId));
-        }
-        if(afterTime != null) {
-            if(beforeTime != null) {
-                return new SysomosHeartbeatStream(this, heartbeatId, RFC3339Utils.parseToUTC(beforeTime), RFC3339Utils.parseToUTC(afterTime));
-            } else {
-                return new SysomosHeartbeatStream(this, heartbeatId, null, RFC3339Utils.parseToUTC(afterTime));
-            }
-        }
-        return new SysomosHeartbeatStream(this, heartbeatId);
+    if (configMap.containsKey(STARTING_TIME_KEY)) {
+      Object configIds = configMap.get(STARTING_TIME_KEY);
+      if (!(configIds instanceof Map)) {
+        throw new IllegalStateException("Invalid configuration.  Added after key must be an instance of Map<String,String> but was "
+            + configIds);
+      }
+      this.addedAfter = (Map)configIds;
     }
-
-    /**
-     * Wait for the queue size to be below threshold before allowing execution to continue on this thread
-     */
-    protected void pauseForSpace() {
-        while(this.providerQueue.size() >= maxQueued) {
-            LOGGER.trace("Sleeping the current thread due to a full queue");
-            try {
-                Thread.sleep(100);
-                LOGGER.trace("Resuming thread after wait period");
-            } catch (InterruptedException e) {
-                LOGGER.warn("Thread was interrupted", e);
-            }
-        }
+    if (configMap.containsKey(ENDING_TIME_KEY)) {
+      Object configIds = configMap.get(ENDING_TIME_KEY);
+      if (!(configIds instanceof Map)) {
+        throw new IllegalStateException("Invalid configuration.  Added before key must be an instance of Map<String,String> but was "
+            + configIds);
+      }
+      this.addedBefore = (Map)configIds;
     }
-
-    @SuppressWarnings("unchecked")
-    protected void extractConfigFromMap(Map configMap) {
-        if(configMap.containsKey(MODE_KEY)) {
-            Object configMode = configMap.get(MODE_KEY);
-            if(!(configMode instanceof Mode)) {
-                throw new IllegalStateException("Invalid configuration.  Mode must be an instance of the Mode enum but was " + configMode);
-            }
-            this.mode = (Mode)configMode;
-        }
-        if(configMap.containsKey(STARTING_DOCS_KEY)) {
-            Object configIds = configMap.get(STARTING_DOCS_KEY);
-            if(!(configIds instanceof Map)) {
-                throw new IllegalStateException("Invalid configuration.  StartingDocs must be an instance of Map<String,String> but was " + configIds);
-            }
-            this.documentIds = (Map)configIds;
-        }
-        if(configMap.containsKey(STARTING_TIME_KEY)) {
-            Object configIds = configMap.get(STARTING_TIME_KEY);
-            if(!(configIds instanceof Map)) {
-                throw new IllegalStateException("Invalid configuration.  Added after key must be an instance of Map<String,String> but was " + configIds);
-            }
-            this.addedAfter = (Map)configIds;
-        }
-        if(configMap.containsKey(ENDING_TIME_KEY)) {
-            Object configIds = configMap.get(ENDING_TIME_KEY);
-            if(!(configIds instanceof Map)) {
-                throw new IllegalStateException("Invalid configuration.  Added before key must be an instance of Map<String,String> but was " + configIds);
-            }
-            this.addedBefore = (Map)configIds;
+  }
+
+  private Queue<StreamsDatum> constructQueue() {
+    return Queues.newConcurrentLinkedQueue();
+  }
+
+  public int getCount() {
+    return this.count.get();
+  }
+
+  /**
+   * To use from command line:
+   *
+   * <p/>
+   * Supply configuration similar to src/test/resources/rss.conf
+   *
+   * <p/>
+   * Launch using:
+   *
+   * <p/>
+   * mvn exec:java -Dexec.mainClass=org.apache.streams.rss.provider.RssStreamProvider -Dexec.args="rss.conf articles.json"
+   *
+   * @param args args
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    SysomosConfiguration config = new ComponentConfigurator<>(SysomosConfiguration.class).detectConfiguration(typesafe, "rss");
+    SysomosProvider provider = new SysomosProvider(config);
+
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
+      while (iterator.hasNext()) {
+        StreamsDatum datum = iterator.next();
+        String json;
+        try {
+          json = mapper.writeValueAsString(datum.getDocument());
+          outStream.println(json);
+        } catch (JsonProcessingException ex) {
+          System.err.println(ex.getMessage());
         }
+      }
     }
-
-    private Queue<StreamsDatum> constructQueue() {
-        return Queues.newConcurrentLinkedQueue();
-    }
-
-    public int getCount() {
-        return this.count.get();
-    }
-
-    public static void main(String[] args) throws Exception {
-
-        Preconditions.checkArgument(args.length >= 2);
-
-        String configfile = args[0];
-        String outfile = args[1];
-
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config testResourceConfig = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
-
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        SysomosConfiguration config = new ComponentConfigurator<>(SysomosConfiguration.class).detectConfiguration(typesafe, "rss");
-        SysomosProvider provider = new SysomosProvider(config);
-
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
-            while(iterator.hasNext()) {
-                StreamsDatum datum = iterator.next();
-                String json;
-                try {
-                    json = mapper.writeValueAsString(datum.getDocument());
-                    outStream.println(json);
-                } catch (JsonProcessingException e) {
-                    System.err.println(e.getMessage());
-                }
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
-    }
+    while ( provider.isRunning() );
+    provider.cleanUp();
+    outStream.flush();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/util/SysomosUtils.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/util/SysomosUtils.java b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/util/SysomosUtils.java
index 3b6a843..82d538d 100644
--- a/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/util/SysomosUtils.java
+++ b/streams-contrib/streams-provider-sysomos/src/main/java/org/apache/streams/sysomos/util/SysomosUtils.java
@@ -19,9 +19,10 @@
 
 package org.apache.streams.sysomos.util;
 
+import org.apache.streams.sysomos.SysomosException;
+
 import com.google.common.base.Strings;
 import org.apache.commons.io.IOUtils;
-import org.apache.streams.sysomos.SysomosException;
 import org.joda.time.format.DateTimeFormat;
 import org.joda.time.format.DateTimeFormatter;
 import org.slf4j.Logger;
@@ -36,49 +37,53 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 /**
- * Provides utilities for working with Sysomos
+ * Provides utilities for working with Sysomos.
  */
 public class SysomosUtils {
 
-    public static final Pattern CODE_PATTERN = Pattern.compile("code: ([0-9]+)");
-    public static final DateTimeFormatter SYSOMOS_DATE_FORMATTER = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").withZoneUTC();
-    private final static Logger LOGGER = LoggerFactory.getLogger(SysomosUtils.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(SysomosUtils.class);
+
+  public static final Pattern CODE_PATTERN = Pattern.compile("code: ([0-9]+)");
+  public static final DateTimeFormatter SYSOMOS_DATE_FORMATTER = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").withZoneUTC();
 
-    private SysomosUtils() {}
+  private SysomosUtils() {}
 
-    /**
-     * Queries the sysomos URL and provides the response as a String
-     *
-     * @param url the Sysomos URL to query
-     * @return valid XML String
-     */
-    public static String queryUrl(URL url) {
-        try {
-            HttpURLConnection cn = (HttpURLConnection) url.openConnection();
-            cn.setRequestMethod("GET");
-            cn.addRequestProperty("Content-Type", "text/xml;charset=UTF-8");
-            cn.setDoInput(true);
-            cn.setDoOutput(false);
-            StringWriter writer = new StringWriter();
-            IOUtils.copy(new InputStreamReader(cn.getInputStream()), writer);
-            writer.flush();
+  /**
+   * Queries the sysomos URL and provides the response as a String.
+   *
+   * @param url the Sysomos URL to query
+   * @return valid XML String
+   */
+  public static String queryUrl(URL url) {
+    try {
+      HttpURLConnection cn = (HttpURLConnection) url.openConnection();
+      cn.setRequestMethod("GET");
+      cn.addRequestProperty("Content-Type", "text/xml;charset=UTF-8");
+      cn.setDoInput(true);
+      cn.setDoOutput(false);
+      StringWriter writer = new StringWriter();
+      IOUtils.copy(new InputStreamReader(cn.getInputStream()), writer);
+      writer.flush();
 
-            String xmlResponse = writer.toString();
-            if (Strings.isNullOrEmpty(xmlResponse)) {
-                throw new SysomosException("XML Response from Sysomos was empty : " + xmlResponse + "\n" + cn.getResponseMessage(), cn.getResponseCode());
-            }
-            return xmlResponse;
-        } catch (IOException e) {
-            LOGGER.error("Error executing request : {}", e, url.toString());
-            String message = e.getMessage();
-            Matcher match = CODE_PATTERN.matcher(message);
-            if(match.find()) {
-                int errorCode = Integer.parseInt(match.group(1));
-                throw new SysomosException(message, e, errorCode);
-            }
-            else {
-                throw new SysomosException(e.getMessage(), e);
-            }
-        }
+      String xmlResponse = writer.toString();
+      if (Strings.isNullOrEmpty(xmlResponse)) {
+        throw new SysomosException("XML Response from Sysomos was empty : "
+            + xmlResponse
+            + "\n"
+            + cn.getResponseMessage(),
+            cn.getResponseCode());
+      }
+      return xmlResponse;
+    } catch (IOException ex) {
+      LOGGER.error("Error executing request : {}", ex, url.toString());
+      String message = ex.getMessage();
+      Matcher match = CODE_PATTERN.matcher(message);
+      if (match.find()) {
+        int errorCode = Integer.parseInt(match.group(1));
+        throw new SysomosException(message, ex, errorCode);
+      } else {
+        throw new SysomosException(ex.getMessage(), ex);
+      }
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/test/java/com/sysomos/test/SysomosJsonSerDeIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/test/java/com/sysomos/test/SysomosJsonSerDeIT.java b/streams-contrib/streams-provider-sysomos/src/test/java/com/sysomos/test/SysomosJsonSerDeIT.java
index e3b4848..7efffcc 100644
--- a/streams-contrib/streams-provider-sysomos/src/test/java/com/sysomos/test/SysomosJsonSerDeIT.java
+++ b/streams-contrib/streams-provider-sysomos/src/test/java/com/sysomos/test/SysomosJsonSerDeIT.java
@@ -22,7 +22,6 @@ import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.sysomos.json.Sysomos;
 import org.junit.Assert;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -36,34 +35,34 @@ import java.io.InputStreamReader;
  */
 public class SysomosJsonSerDeIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(SysomosJsonSerDeIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(SysomosJsonSerDeIT.class);
 
-    private ObjectMapper mapper = new ObjectMapper();
+  private ObjectMapper mapper = new ObjectMapper();
 
-    @Test
-    public void Test()
-    {
-        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.FALSE);
-        mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
+  @Test
+  public void testSysomosJsonSerDe() {
 
-        InputStream is = SysomosJsonSerDeIT.class.getResourceAsStream("/sysomos_jsons.txt");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.FALSE);
+    mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
 
-        try {
-            while (br.ready()) {
-                String line = br.readLine();
-                LOGGER.debug(line);
+    InputStream is = SysomosJsonSerDeIT.class.getResourceAsStream("/sysomos_jsons.txt");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
 
-                Sysomos ser = mapper.readValue(line, Sysomos.class);
+    try {
+      while (br.ready()) {
+        String line = br.readLine();
+        LOGGER.debug(line);
 
-                String des = mapper.writeValueAsString(ser);
-                LOGGER.debug(des);
-            }
-        } catch( Exception e ) {
-            e.printStackTrace();
-            Assert.fail();
-        }
+        Sysomos ser = mapper.readValue(line, Sysomos.class);
+
+        String des = mapper.writeValueAsString(ser);
+        LOGGER.debug(des);
+      }
+    } catch ( Exception ex ) {
+      ex.printStackTrace();
+      Assert.fail();
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/test/java/com/sysomos/test/SysomosXmlSerDeIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/test/java/com/sysomos/test/SysomosXmlSerDeIT.java b/streams-contrib/streams-provider-sysomos/src/test/java/com/sysomos/test/SysomosXmlSerDeIT.java
index b9ee2e1..e078d02 100644
--- a/streams-contrib/streams-provider-sysomos/src/test/java/com/sysomos/test/SysomosXmlSerDeIT.java
+++ b/streams-contrib/streams-provider-sysomos/src/test/java/com/sysomos/test/SysomosXmlSerDeIT.java
@@ -27,7 +27,6 @@ import com.fasterxml.jackson.dataformat.xml.XmlMapper;
 import com.sysomos.xml.BeatApi;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -41,60 +40,63 @@ import java.io.InputStreamReader;
  */
 public class SysomosXmlSerDeIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(SysomosXmlSerDeIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(SysomosXmlSerDeIT.class);
 
-    private XmlMapper xmlMapper;
+  private XmlMapper xmlMapper;
 
-    @Before
-    public void Before() {
+  /**
+   * before.
+   */
+  @Before
+  public void before() {
 
-        XmlFactory f = new XmlFactory(new InputFactoryImpl(),
-                new OutputFactoryImpl());
+    XmlFactory xmlFactory = new XmlFactory(new InputFactoryImpl(),
+        new OutputFactoryImpl());
 
-        JacksonXmlModule module = new JacksonXmlModule();
+    JacksonXmlModule module = new JacksonXmlModule();
 
-        module.setDefaultUseWrapper(false);
+    module.setDefaultUseWrapper(false);
 
-        xmlMapper = new XmlMapper(f, module);
+    xmlMapper = new XmlMapper(xmlFactory, module);
 
-        xmlMapper
-                .configure(
-                        DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY,
-                        Boolean.TRUE);
-        xmlMapper
-                .configure(
-                        DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT,
-                        Boolean.TRUE);
-        xmlMapper
-                .configure(
-                        DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY,
-                        Boolean.TRUE);
-        xmlMapper.configure(
-                DeserializationFeature.READ_ENUMS_USING_TO_STRING,
-                Boolean.TRUE);
+    xmlMapper
+        .configure(
+            DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY,
+            Boolean.TRUE);
+    xmlMapper
+        .configure(
+            DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT,
+            Boolean.TRUE);
+    xmlMapper
+        .configure(
+            DeserializationFeature.USE_JAVA_ARRAY_FOR_JSON_ARRAY,
+            Boolean.TRUE);
+    xmlMapper.configure(
+        DeserializationFeature.READ_ENUMS_USING_TO_STRING,
+        Boolean.TRUE);
 
-    }
+  }
+
+  @Test
+  public void test() {
+
+    InputStream is = SysomosXmlSerDeIT.class.getResourceAsStream("/sysomos_xmls.txt");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
+
+    try {
+      while (br.ready()) {
+        String line = br.readLine();
+        LOGGER.debug(line);
+
+        BeatApi ser = xmlMapper.readValue(line, BeatApi.class);
 
-    @Test
-    public void Test()
-    {
-        InputStream is = SysomosXmlSerDeIT.class.getResourceAsStream("/sysomos_xmls.txt");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
-
-        try {
-            while (br.ready()) {
-                String line = br.readLine();
-                LOGGER.debug(line);
-
-                BeatApi ser = xmlMapper.readValue(line, BeatApi.class);
-
-                String des = xmlMapper.writeValueAsString(ser);
-                LOGGER.debug(des);
-            }
-        } catch( Exception e ) {
-            e.printStackTrace();
-            Assert.fail();
-        }
+        String des = xmlMapper.writeValueAsString(ser);
+        LOGGER.debug(des);
+      }
+    } catch ( Exception ex ) {
+      ex.printStackTrace();
+      Assert.fail();
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-sysomos/src/test/java/org/apache/streams/sysomos/test/provider/SysomosProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-sysomos/src/test/java/org/apache/streams/sysomos/test/provider/SysomosProviderIT.java b/streams-contrib/streams-provider-sysomos/src/test/java/org/apache/streams/sysomos/test/provider/SysomosProviderIT.java
index b4289ee..a088726 100644
--- a/streams-contrib/streams-provider-sysomos/src/test/java/org/apache/streams/sysomos/test/provider/SysomosProviderIT.java
+++ b/streams-contrib/streams-provider-sysomos/src/test/java/org/apache/streams/sysomos/test/provider/SysomosProviderIT.java
@@ -18,63 +18,49 @@
 
 package org.apache.streams.sysomos.test.provider;
 
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Lists;
-import com.sysomos.SysomosConfiguration;
-import org.apache.commons.lang.StringUtils;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.sysomos.provider.SysomosProvider;
-import org.junit.Assert;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Lists;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.FileReader;
-import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.io.LineNumberReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.util.List;
 
 /**
- * Integration test for SysomosProviderIT
- *
- * Created by sblackmon on 10/21/16.
+ * Integration test for SysomosProviderIT.
  */
 @Ignore("this is ignored because the project doesn't have credentials to test it with during CI")
 public class SysomosProviderIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(SysomosProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(SysomosProviderIT.class);
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Test
-    public void testRssStreamProvider() throws Exception {
+  @Test
+  public void testRssStreamProvider() throws Exception {
 
-        String configfile = "./target/test-classes/RssStreamProviderIT.conf";
-        String outfile = "./target/test-classes/RssStreamProviderIT.stdout.txt";
+    String configfile = "./target/test-classes/RssStreamProviderIT.conf";
+    String outfile = "./target/test-classes/RssStreamProviderIT.stdout.txt";
 
-        SysomosProvider.main(Lists.newArrayList(configfile, outfile).toArray(new String[2]));
+    SysomosProvider.main(Lists.newArrayList(configfile, outfile).toArray(new String[2]));
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while (outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() >= 1);
+    assert (outCounter.getLineNumber() >= 1);
 
-    }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/StreamsTwitterMapper.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/StreamsTwitterMapper.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/StreamsTwitterMapper.java
index aca185c..671a830 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/StreamsTwitterMapper.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/StreamsTwitterMapper.java
@@ -18,69 +18,87 @@
 
 package org.apache.streams.twitter.converter;
 
+import org.apache.streams.data.util.RFC3339Utils;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.twitter.converter.util.TwitterActivityUtil;
+
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.DeserializationContext;
 import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
 import com.fasterxml.jackson.databind.module.SimpleModule;
-import org.apache.streams.data.util.RFC3339Utils;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.joda.time.DateTime;
 import org.joda.time.format.DateTimeFormat;
 import org.joda.time.format.DateTimeFormatter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 
 /**
  * This class assist with handling twitter's date-time format during conversion
  *
+ * <p/>
  * Deprecated: use StreamsJacksonMapper.getInstance() with TwitterDateTimeFormat on the classpath instead
  */
 @Deprecated
 public class StreamsTwitterMapper extends StreamsJacksonMapper {
 
-    public static final String TWITTER_FORMAT = "EEE MMM dd HH:mm:ss Z yyyy";
+  private static final Logger LOGGER = LoggerFactory.getLogger(TwitterActivityUtil.class);
 
-    public static final DateTimeFormatter TWITTER_FORMATTER = DateTimeFormat.forPattern(TWITTER_FORMAT);
+  public static final String TWITTER_FORMAT = "EEE MMM dd HH:mm:ss Z yyyy";
 
-    public static Long getMillis(String dateTime) {
+  public static final DateTimeFormatter TWITTER_FORMATTER = DateTimeFormat.forPattern(TWITTER_FORMAT);
 
-        // this function is for pig which doesn't handle exceptions well
-        try {
-            return TWITTER_FORMATTER.parseMillis(dateTime);
-        } catch( Exception e ) {
-            return null;
-        }
+  /**
+   * Convert to millis with TWITTER_FORMATTER.
+   * @param dateTime dateTime as String
+   * @return millis as Long
+   */
+  public static Long getMillis(String dateTime) {
 
+    // this function is for pig which doesn't handle exceptions well
+    try {
+      return TWITTER_FORMATTER.parseMillis(dateTime);
+    } catch ( Exception ex ) {
+      return null;
     }
 
-    private static final StreamsTwitterMapper INSTANCE = new StreamsTwitterMapper();
+  }
 
-    public static StreamsTwitterMapper getInstance(){
-        return INSTANCE;
-    }
+  private static final StreamsTwitterMapper INSTANCE = new StreamsTwitterMapper();
+
+  public static StreamsTwitterMapper getInstance() {
+    return INSTANCE;
+  }
 
-    public StreamsTwitterMapper() {
-        super();
-        registerModule(new SimpleModule()
-        {
-            {
-                addDeserializer(DateTime.class, new StdDeserializer<DateTime>(DateTime.class) {
-                    @Override
-                    public DateTime deserialize(JsonParser jpar, DeserializationContext context) throws IOException, JsonProcessingException {
-                        DateTime result = null;
-                        try {
-                            result = TWITTER_FORMATTER.parseDateTime(jpar.getValueAsString());
-                        } catch( Exception ignored ) { }
-                        try {
-                            result = RFC3339Utils.getInstance().parseToUTC(jpar.getValueAsString());
-                        } catch( Exception ignored ) { }
-                        return result;
-                    }
-                });
+  /**
+   * StreamsTwitterMapper constructor.
+   */
+  public StreamsTwitterMapper() {
+    super();
+    registerModule(new SimpleModule() {
+      {
+        addDeserializer(DateTime.class, new StdDeserializer<DateTime>(DateTime.class) {
+          @Override
+          public DateTime deserialize(JsonParser jpar, DeserializationContext context) throws IOException, JsonProcessingException {
+            DateTime result = null;
+            try {
+              result = TWITTER_FORMATTER.parseDateTime(jpar.getValueAsString());
+            } catch ( Exception ignored ) {
+              LOGGER.trace("ignored", ignored);
             }
+            try {
+              result = RFC3339Utils.getInstance().parseToUTC(jpar.getValueAsString());
+            } catch ( Exception ignored ) {
+              LOGGER.trace("ignored", ignored);
+            }
+            return result;
+          }
         });
+      }
+    });
 
-    }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterDateTimeFormat.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterDateTimeFormat.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterDateTimeFormat.java
index 5a34868..d8da2c1 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterDateTimeFormat.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterDateTimeFormat.java
@@ -20,12 +20,15 @@ package org.apache.streams.twitter.converter;
 
 import org.apache.streams.jackson.StreamsDateTimeFormat;
 
+/**
+ * TwitterDateTimeFormat.
+ */
 public class TwitterDateTimeFormat implements StreamsDateTimeFormat {
 
-    public static final String TWITTER_FORMAT = "EEE MMM dd HH:mm:ss Z yyyy";
+  public static final String TWITTER_FORMAT = "EEE MMM dd HH:mm:ss Z yyyy";
 
-    @Override
-    public String getFormat() {
-        return TWITTER_FORMAT;
-    }
+  @Override
+  public String getFormat() {
+    return TWITTER_FORMAT;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterDocumentClassifier.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterDocumentClassifier.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterDocumentClassifier.java
index 3c71f9a..f555e8d 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterDocumentClassifier.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterDocumentClassifier.java
@@ -18,10 +18,6 @@
 
 package org.apache.streams.twitter.converter;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import org.apache.streams.data.DocumentClassifier;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.twitter.pojo.Delete;
@@ -32,6 +28,11 @@ import org.apache.streams.twitter.pojo.Tweet;
 import org.apache.streams.twitter.pojo.User;
 import org.apache.streams.twitter.pojo.UserstreamEvent;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -39,47 +40,53 @@ import java.util.List;
 import static org.apache.streams.twitter.converter.TwitterDateTimeFormat.TWITTER_FORMAT;
 
 /**
- * Ensures twitter documents can be converted to Activity
+ * Ensures twitter documents can be converted to Activity.
  */
 public class TwitterDocumentClassifier implements DocumentClassifier {
 
-    public List<Class> detectClasses(Object document) {
+  @Override
+  public List<Class> detectClasses(Object document) {
 
-        Preconditions.checkNotNull(document);
+    Preconditions.checkNotNull(document);
 
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(TWITTER_FORMAT));
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(TWITTER_FORMAT));
 
-        ObjectNode objectNode;
-        try {
-            if( document instanceof String )
-                objectNode = mapper.readValue((String)document, ObjectNode.class);
-            else if( document instanceof ObjectNode )
-                objectNode = (ObjectNode) document;
-            else
-                objectNode = mapper.convertValue(document, ObjectNode.class);
-        } catch (IOException e) {
-            return new ArrayList<>();
-        }
-
-        List<Class> classList = new ArrayList<>();
+    ObjectNode objectNode;
+    try {
+      if ( document instanceof String ) {
+        objectNode = mapper.readValue((String) document, ObjectNode.class);
+      } else if ( document instanceof ObjectNode ) {
+        objectNode = (ObjectNode) document;
+      } else {
+        objectNode = mapper.convertValue(document, ObjectNode.class);
+      }
+    } catch (IOException ex) {
+      return new ArrayList<>();
+    }
 
-        if( objectNode.findValue("retweeted_status") != null && objectNode.get("retweeted_status") != null)
-            classList.add(Retweet.class);
-        else if( objectNode.findValue("delete") != null )
-            classList.add(Delete.class);
-        else if( objectNode.findValue("friends") != null ||
-                 objectNode.findValue("friends_str") != null )
-            classList.add(FriendList.class);
-        else if( objectNode.findValue("target_object") != null )
-            classList.add(UserstreamEvent.class);
-        else if( objectNode.findValue("follower") != null && objectNode.findValue("followee") != null)
-            classList.add(Follow.class);
-        else if ( objectNode.findValue("location") != null && objectNode.findValue("user") == null)
-            classList.add(User.class);
-        else
-            classList.add(Tweet.class);
+    List<Class> classList = new ArrayList<>();
 
-        return classList;
+    if ( objectNode.findValue("retweeted_status") != null
+        && objectNode.get("retweeted_status") != null) {
+      classList.add(Retweet.class);
+    } else if ( objectNode.findValue("delete") != null ) {
+      classList.add(Delete.class);
+    } else if ( objectNode.findValue("friends") != null
+        || objectNode.findValue("friends_str") != null ) {
+      classList.add(FriendList.class);
+    } else if ( objectNode.findValue("target_object") != null ) {
+      classList.add(UserstreamEvent.class);
+    } else if ( objectNode.findValue("follower") != null
+        && objectNode.findValue("followee") != null) {
+      classList.add(Follow.class);
+    } else if ( objectNode.findValue("location") != null
+        && objectNode.findValue("user") == null) {
+      classList.add(User.class);
+    } else {
+      classList.add(Tweet.class);
     }
 
+    return classList;
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterFollowActivityConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterFollowActivityConverter.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterFollowActivityConverter.java
index e0ed4a4..f34c14a 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterFollowActivityConverter.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterFollowActivityConverter.java
@@ -18,67 +18,69 @@
 
 package org.apache.streams.twitter.converter;
 
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Provider;
-import org.apache.streams.twitter.pojo.Follow;
 import org.apache.streams.twitter.converter.util.TwitterActivityUtil;
+import org.apache.streams.twitter.pojo.Follow;
+
+import com.google.common.collect.Lists;
+import org.apache.commons.lang.NotImplementedException;
 
 import java.io.Serializable;
 import java.util.List;
 
 public class TwitterFollowActivityConverter implements ActivityConverter<Follow>, Serializable {
 
-    public TwitterFollowActivityConverter() {
-    }
-
-    private static TwitterFollowActivityConverter instance = new TwitterFollowActivityConverter();
-
-    public static TwitterFollowActivityConverter getInstance() {
-        return instance;
-    }
-
-    public static Class requiredClass = Follow.class;
-
-    @Override
-    public Class requiredClass() {
-        return requiredClass;
-    }
-
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
-
-    @Override
-    public Follow fromActivity(Activity deserialized) throws ActivityConversionException {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public List<Activity> toActivityList(Follow event) throws ActivityConversionException {
-
-        Activity activity = new Activity();
-        activity.setVerb("follow");
-        activity.setActor(TwitterActivityUtil.buildActor(event.getFollower()));
-        activity.setObject(TwitterActivityUtil.buildActor(event.getFollowee()));
-        activity.setId(activity.getActor().getId() + "-follow->" + activity.getObject().getId());
-        activity.setProvider((Provider) new Provider().withId("twitter"));
-        return Lists.newArrayList(activity);
-    }
-
-    @Override
-    public List<Follow> fromActivityList(List<Activity> list) {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public List<Activity> toActivityList(List<Follow> list) {
-        throw new NotImplementedException();
-    }
+  public TwitterFollowActivityConverter() {
+  }
+
+  private static TwitterFollowActivityConverter instance = new TwitterFollowActivityConverter();
+
+  public static TwitterFollowActivityConverter getInstance() {
+    return instance;
+  }
+
+  public static Class requiredClass = Follow.class;
+
+  @Override
+  public Class requiredClass() {
+    return requiredClass;
+  }
+
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
+
+  @Override
+  public Follow fromActivity(Activity deserialized) throws ActivityConversionException {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public List<Follow> fromActivityList(List<Activity> list) {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public List<Activity> toActivityList(Follow event) throws ActivityConversionException {
+
+    Activity activity = new Activity();
+    activity.setVerb("follow");
+    activity.setActor(TwitterActivityUtil.buildActor(event.getFollower()));
+    activity.setObject(TwitterActivityUtil.buildActor(event.getFollowee()));
+    activity.setId(activity.getActor().getId() + "-follow->" + activity.getObject().getId());
+    activity.setProvider((Provider) new Provider().withId("twitter"));
+    return Lists.newArrayList(activity);
+  }
+
+  @Override
+  public List<Activity> toActivityList(List<Follow> list) {
+    throw new NotImplementedException();
+  }
+
+
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonDeleteActivityConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonDeleteActivityConverter.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonDeleteActivityConverter.java
index 3e61ef9..ac031b4 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonDeleteActivityConverter.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonDeleteActivityConverter.java
@@ -18,8 +18,6 @@
 
 package org.apache.streams.twitter.converter;
 
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.pojo.json.Activity;
@@ -27,63 +25,63 @@ import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.twitter.pojo.Delete;
 import org.apache.streams.twitter.pojo.Tweet;
 
+import com.google.common.collect.Lists;
+import org.apache.commons.lang.NotImplementedException;
+
 import java.io.Serializable;
 import java.util.List;
 
 import static org.apache.streams.twitter.converter.util.TwitterActivityUtil.updateActivity;
 
-
 /**
-* Created with IntelliJ IDEA.
-* User: mdelaet
-* Date: 9/30/13
-* Time: 9:24 AM
-* To change this template use File | Settings | File Templates.
-*/
+ * TwitterJsonDeleteActivityConverter.
+ */
+//TODO: use class explicitly somewhere
 public class TwitterJsonDeleteActivityConverter implements ActivityConverter<Delete>, Serializable {
 
-    public static Class requiredClass = Delete.class;
+  public static Class requiredClass = Delete.class;
 
-    @Override
-    public Class requiredClass() {
-        return requiredClass;
-    }
+  @Override
+  public Class requiredClass() {
+    return requiredClass;
+  }
 
-    private static TwitterJsonDeleteActivityConverter instance = new TwitterJsonDeleteActivityConverter();
+  private static TwitterJsonDeleteActivityConverter instance = new TwitterJsonDeleteActivityConverter();
 
-    public static TwitterJsonDeleteActivityConverter getInstance() {
-        return instance;
-    }
+  public static TwitterJsonDeleteActivityConverter getInstance() {
+    return instance;
+  }
 
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
 
-    @Override
-    public Delete fromActivity(Activity deserialized) throws ActivityConversionException {
-        throw new NotImplementedException();
-    }
+  @Override
+  public Delete fromActivity(Activity deserialized) throws ActivityConversionException {
+    throw new NotImplementedException();
+  }
 
-    @Override
-    public List<Activity> toActivityList(List<Delete> serializedList) {
-        throw new NotImplementedException();
-    }
+  @Override
+  public List<Delete> fromActivityList(List<Activity> list) {
+    throw new NotImplementedException();
+  }
 
-    public List<Activity> toActivityList(Delete delete) throws ActivityConversionException {
+  @Override
+  public List<Activity> toActivityList(List<Delete> serializedList) {
+    throw new NotImplementedException();
+  }
 
-        Activity activity = new Activity();
-        updateActivity(delete, activity);
-        return Lists.newArrayList(activity);
-    }
+  @Override
+  public List<Activity> toActivityList(Delete delete) throws ActivityConversionException {
 
-    @Override
-    public List<Delete> fromActivityList(List<Activity> list) {
-        throw new NotImplementedException();
-    }
+    Activity activity = new Activity();
+    updateActivity(delete, activity);
+    return Lists.newArrayList(activity);
+  }
 
-    public ActivityObject buildTarget(Tweet tweet) {
-        return null;
-    }
+  public ActivityObject buildTarget(Tweet tweet) {
+    return null;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonRetweetActivityConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonRetweetActivityConverter.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonRetweetActivityConverter.java
index 30a1916..13e2568 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonRetweetActivityConverter.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonRetweetActivityConverter.java
@@ -18,13 +18,14 @@
 
 package org.apache.streams.twitter.converter;
 
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.twitter.pojo.Retweet;
 
+import com.google.common.collect.Lists;
+import org.apache.commons.lang.NotImplementedException;
+
 import java.io.Serializable;
 import java.util.List;
 
@@ -32,52 +33,54 @@ import static org.apache.streams.twitter.converter.util.TwitterActivityUtil.upda
 
 public class TwitterJsonRetweetActivityConverter implements ActivityConverter<Retweet>, Serializable {
 
-    public static Class requiredClass = Retweet.class;
-
-    @Override
-    public Class requiredClass() {
-        return requiredClass;
-    }
-
-    private static TwitterJsonRetweetActivityConverter instance = new TwitterJsonRetweetActivityConverter();
-
-    public static TwitterJsonRetweetActivityConverter getInstance() {
-        return instance;
-    }
-
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
-
-    @Override
-    public Retweet fromActivity(Activity deserialized) throws ActivityConversionException {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public List<Activity> toActivityList(Retweet retweet) throws ActivityConversionException {
-
-        Activity activity = new Activity();
-        updateActivity(retweet, activity);
-
-        return Lists.newArrayList(activity);
-    }
-
-    @Override
-    public List<Retweet> fromActivityList(List<Activity> list) {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public List<Activity> toActivityList(List<Retweet> serializedList) {
-        List<Activity> result = Lists.newArrayList();
-        for( Retweet item : serializedList ) {
-            try {
-                List<Activity> activities = toActivityList(item);
-                result.addAll(activities);
-            } catch (ActivityConversionException e) {}
-        }
-        return result;
+  public static Class requiredClass = Retweet.class;
+
+  @Override
+  public Class requiredClass() {
+    return requiredClass;
+  }
+
+  private static TwitterJsonRetweetActivityConverter instance = new TwitterJsonRetweetActivityConverter();
+
+  public static TwitterJsonRetweetActivityConverter getInstance() {
+    return instance;
+  }
+
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
+
+  @Override
+  public Retweet fromActivity(Activity deserialized) throws ActivityConversionException {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public List<Retweet> fromActivityList(List<Activity> list) {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public List<Activity> toActivityList(Retweet retweet) throws ActivityConversionException {
+
+    Activity activity = new Activity();
+    updateActivity(retweet, activity);
+
+    return Lists.newArrayList(activity);
+  }
+
+  @Override
+  public List<Activity> toActivityList(List<Retweet> serializedList) {
+    List<Activity> result = Lists.newArrayList();
+    for ( Retweet item : serializedList ) {
+      try {
+        List<Activity> activities = toActivityList(item);
+        result.addAll(activities);
+      } catch (ActivityConversionException ex) {
+        //
+      }
     }
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonTweetActivityConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonTweetActivityConverter.java b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonTweetActivityConverter.java
index 0997a7f..c3b5b15 100644
--- a/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonTweetActivityConverter.java
+++ b/streams-contrib/streams-provider-twitter/src/main/java/org/apache/streams/twitter/converter/TwitterJsonTweetActivityConverter.java
@@ -18,13 +18,14 @@
 
 package org.apache.streams.twitter.converter;
 
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.twitter.pojo.Tweet;
 
+import com.google.common.collect.Lists;
+import org.apache.commons.lang.NotImplementedException;
+
 import java.io.Serializable;
 import java.util.List;
 
@@ -32,53 +33,55 @@ import static org.apache.streams.twitter.converter.util.TwitterActivityUtil.upda
 
 public class TwitterJsonTweetActivityConverter implements ActivityConverter<Tweet>, Serializable {
 
-    public static Class requiredClass = Tweet.class;
+  public static Class requiredClass = Tweet.class;
 
-    @Override
-    public Class requiredClass() {
-        return requiredClass;
-    }
+  @Override
+  public Class requiredClass() {
+    return requiredClass;
+  }
 
-    private static TwitterJsonTweetActivityConverter instance = new TwitterJsonTweetActivityConverter();
+  private static TwitterJsonTweetActivityConverter instance = new TwitterJsonTweetActivityConverter();
 
-    public static TwitterJsonTweetActivityConverter getInstance() {
-        return instance;
-    }
+  public static TwitterJsonTweetActivityConverter getInstance() {
+    return instance;
+  }
 
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
 
-    @Override
-    public Tweet fromActivity(Activity deserialized) throws ActivityConversionException {
-        throw new NotImplementedException();
-    }
+  @Override
+  public Tweet fromActivity(Activity deserialized) throws ActivityConversionException {
+    throw new NotImplementedException();
+  }
 
-    @Override
-    public List<Activity> toActivityList(Tweet tweet) throws ActivityConversionException {
+  @Override
+  public List<Tweet> fromActivityList(List<Activity> list) {
+    throw new NotImplementedException();
+  }
 
-        Activity activity = new Activity();
+  @Override
+  public List<Activity> toActivityList(Tweet tweet) throws ActivityConversionException {
 
-        updateActivity(tweet, activity);
+    Activity activity = new Activity();
 
-        return Lists.newArrayList(activity);
-    }
+    updateActivity(tweet, activity);
 
-    @Override
-    public List<Tweet> fromActivityList(List<Activity> list) {
-        throw new NotImplementedException();
-    }
+    return Lists.newArrayList(activity);
+  }
 
-    @Override
-    public List<Activity> toActivityList(List<Tweet> serializedList) {
-        List<Activity> result = Lists.newArrayList();
-        for( Tweet item : serializedList ) {
-            try {
-                List<Activity> activities = toActivityList(item);
-                result.addAll(activities);
-            } catch (ActivityConversionException e) {}
-        }
-        return result;
+  @Override
+  public List<Activity> toActivityList(List<Tweet> serializedList) {
+    List<Activity> result = Lists.newArrayList();
+    for ( Tweet item : serializedList ) {
+      try {
+        List<Activity> activities = toActivityList(item);
+        result.addAll(activities);
+      } catch (ActivityConversionException ex) {
+        //
+      }
     }
+    return result;
+  }
 }



[04/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/DatumCounterWriter.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/DatumCounterWriter.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/DatumCounterWriter.java
index b7f777e..0c7af1e 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/DatumCounterWriter.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/DatumCounterWriter.java
@@ -18,11 +18,16 @@
 
 package org.apache.streams.local.test.writer;
 
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistWriter;
 
-import java.util.*;
+import com.google.common.collect.Lists;
+
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Random;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -31,79 +36,79 @@ import java.util.concurrent.atomic.AtomicLong;
  */
 public class DatumCounterWriter implements StreamsPersistWriter{
 
-    @Override
-    public String getId() {
-        return "DatumCounterWriter";
-    }
+  @Override
+  public String getId() {
+    return "DatumCounterWriter";
+  }
 
-    /**
-     * Set of all ids that have been claimed.  Ensures all instances are assigned unique ids
-     */
-    public static Set<Integer> CLAIMED_ID = new HashSet<Integer>();
-    /**
-     * Random instance to generate ids
-     */
-    public static final Random RAND = new Random();
-    /**
-     * Set of instance ids that received data. Usefully for testing parrallelization is actually working.
-     */
-    public final static Set<Integer> SEEN_DATA = Collections.newSetFromMap(new ConcurrentHashMap<Integer, Boolean>());
-    /**
-     * The total count of data seen by a all instances of a processor.
-     */
-    public static final ConcurrentHashMap<String, AtomicLong> COUNTS = new ConcurrentHashMap<>();
-    /**
-     * The documents received
-     */
-    public static final ConcurrentHashMap<String, List<Object>> RECEIVED = new ConcurrentHashMap<>();
+  /**
+   * Set of all ids that have been claimed.  Ensures all instances are assigned unique ids
+   */
+  public static Set<Integer> CLAIMED_ID = new HashSet<Integer>();
+  /**
+   * Random instance to generate ids
+   */
+  public static final Random RAND = new Random();
+  /**
+   * Set of instance ids that received data. Useful for testing that parallelization is actually working.
+   */
+  public final static Set<Integer> SEEN_DATA = Collections.newSetFromMap(new ConcurrentHashMap<Integer, Boolean>());
+  /**
+   * The total count of data seen by all instances of a processor.
+   */
+  public static final ConcurrentHashMap<String, AtomicLong> COUNTS = new ConcurrentHashMap<>();
+  /**
+   * The documents received
+   */
+  public static final ConcurrentHashMap<String, List<Object>> RECEIVED = new ConcurrentHashMap<>();
 
-    private int counter = 0;
-    private String writerId;
-    private Integer id;
+  private int counter = 0;
+  private String writerId;
+  private Integer id;
 
-    public DatumCounterWriter(String writerId) {
-        this.writerId = writerId;
-    }
+  public DatumCounterWriter(String writerId) {
+    this.writerId = writerId;
+  }
 
-    @Override
-    public void write(StreamsDatum entry) {
-        ++this.counter;
-        SEEN_DATA.add(this.id);
-        synchronized (RECEIVED) {
-            List<Object> documents = RECEIVED.get(this.writerId);
-            if(documents == null) {
-                List<Object> docs = Lists.newLinkedList();
-                docs.add(entry.getDocument());
-                RECEIVED.put(this.writerId, docs);
-            } else {
-                documents.add(entry.getDocument());
-            }
-        }
+  @Override
+  public void write(StreamsDatum entry) {
+    ++this.counter;
+    SEEN_DATA.add(this.id);
+    synchronized (RECEIVED) {
+      List<Object> documents = RECEIVED.get(this.writerId);
+      if(documents == null) {
+        List<Object> docs = Lists.newLinkedList();
+        docs.add(entry.getDocument());
+        RECEIVED.put(this.writerId, docs);
+      } else {
+        documents.add(entry.getDocument());
+      }
     }
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        synchronized (CLAIMED_ID) {
-            this.id = RAND.nextInt();
-            while(!CLAIMED_ID.add(this.id)) {
-                this.id = RAND.nextInt();
-            }
-        }
+  @Override
+  public void prepare(Object configurationObject) {
+    synchronized (CLAIMED_ID) {
+      this.id = RAND.nextInt();
+      while(!CLAIMED_ID.add(this.id)) {
+        this.id = RAND.nextInt();
+      }
     }
+  }
 
-    @Override
-    public void cleanUp() {
-        synchronized (COUNTS) {
-            AtomicLong count = COUNTS.get(this.writerId);
-            if(count == null) {
-                COUNTS.put(this.writerId, new AtomicLong(this.counter));
-            } else {
-                count.addAndGet(this.counter);
-            }
-        }
+  @Override
+  public void cleanUp() {
+    synchronized (COUNTS) {
+      AtomicLong count = COUNTS.get(this.writerId);
+      if(count == null) {
+        COUNTS.put(this.writerId, new AtomicLong(this.counter));
+      } else {
+        count.addAndGet(this.counter);
+      }
     }
+  }
 
-    public int getDatumsCounted() {
-        return this.counter;
-    }
+  public int getDatumsCounted() {
+    return this.counter;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/DoNothingWriter.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/DoNothingWriter.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/DoNothingWriter.java
index d9ec6d3..48f4b68 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/DoNothingWriter.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/DoNothingWriter.java
@@ -20,6 +20,7 @@ package org.apache.streams.local.test.writer;
 
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistWriter;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -28,25 +29,25 @@ import org.slf4j.LoggerFactory;
  */
 public class DoNothingWriter implements StreamsPersistWriter {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(DoNothingWriter.class);
+  private final static Logger LOGGER = LoggerFactory.getLogger(DoNothingWriter.class);
 
-    @Override
-    public String getId() {
-        return "DoNothingWriter";
-    }
+  @Override
+  public String getId() {
+    return "DoNothingWriter";
+  }
 
-    @Override
-    public void write(StreamsDatum entry) {
+  @Override
+  public void write(StreamsDatum entry) {
 
-    }
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public void prepare(Object configurationObject) {
 
-    }
+  }
 
-    @Override
-    public void cleanUp() {
-        LOGGER.debug("Writer Clean Up!");
-    }
+  @Override
+  public void cleanUp() {
+    LOGGER.debug("Writer Clean Up!");
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/SystemOutWriter.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/SystemOutWriter.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/SystemOutWriter.java
index 76ce353..2711ae1 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/SystemOutWriter.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/writer/SystemOutWriter.java
@@ -20,6 +20,7 @@ package org.apache.streams.local.test.writer;
 
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistWriter;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -28,25 +29,25 @@ import org.slf4j.LoggerFactory;
  */
 public class SystemOutWriter implements StreamsPersistWriter {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(SystemOutWriter.class);
+  private final static Logger LOGGER = LoggerFactory.getLogger(SystemOutWriter.class);
 
-    @Override
-    public String getId() {
-        return "SystemOutWriter";
-    }
+  @Override
+  public String getId() {
+    return "SystemOutWriter";
+  }
 
-    @Override
-    public void write(StreamsDatum entry) {
-        System.out.println(entry.document);
-    }
+  @Override
+  public void write(StreamsDatum entry) {
+    System.out.println(entry.document);
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public void prepare(Object configurationObject) {
 
-    }
+  }
 
-    @Override
-    public void cleanUp() {
-        LOGGER.debug("Clean up called writer!");
-    }
+  @Override
+  public void cleanUp() {
+    LOGGER.debug("Clean up called writer!");
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/ExpectedDatumsPersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/ExpectedDatumsPersistWriter.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/ExpectedDatumsPersistWriter.java
index 80d4a24..16b98c4 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/ExpectedDatumsPersistWriter.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/ExpectedDatumsPersistWriter.java
@@ -20,58 +20,57 @@ package org.apache.streams.test.component;
 
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistWriter;
-import static org.junit.Assert.*;
 
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.InputStream;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Scanner;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+
 /**
  * Created by rebanks on 2/27/14.
  */
 public class ExpectedDatumsPersistWriter implements StreamsPersistWriter{
 
-    @Override
-    public String getId() {
-        return "ExpectedDatumsPersistWriter";
-    }
+  @Override
+  public String getId() {
+    return "ExpectedDatumsPersistWriter";
+  }
 
-    private StreamsDatumConverter converter;
-    private String fileName;
-    private List<StreamsDatum> expectedDatums;
-    private int counted = 0;
-    private int expectedSize = 0;
+  private StreamsDatumConverter converter;
+  private String fileName;
+  private List<StreamsDatum> expectedDatums;
+  private int counted = 0;
+  private int expectedSize = 0;
 
-    public ExpectedDatumsPersistWriter(StreamsDatumConverter converter, String filePathInResources) {
-        this.converter = converter;
-        this.fileName = filePathInResources;
-    }
+  public ExpectedDatumsPersistWriter(StreamsDatumConverter converter, String filePathInResources) {
+    this.converter = converter;
+    this.fileName = filePathInResources;
+  }
 
 
 
-    @Override
-    public void write(StreamsDatum entry) {
-        int index = this.expectedDatums.indexOf(entry);
-        assertNotEquals("Datum not expected. "+entry.toString(), -1, index);
-        this.expectedDatums.remove(index);
-        ++this.counted;
-    }
+  @Override
+  public void write(StreamsDatum entry) {
+    int index = this.expectedDatums.indexOf(entry);
+    assertNotEquals("Datum not expected. "+entry.toString(), -1, index);
+    this.expectedDatums.remove(index);
+    ++this.counted;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        Scanner scanner = new Scanner(ExpectedDatumsPersistWriter.class.getResourceAsStream(this.fileName));
-        this.expectedDatums = new LinkedList<StreamsDatum>();
-        while(scanner.hasNextLine()) {
-            this.expectedDatums.add(this.converter.convert(scanner.nextLine()));
-        }
-        this.expectedSize = this.expectedDatums.size();
+  @Override
+  public void prepare(Object configurationObject) {
+    Scanner scanner = new Scanner(ExpectedDatumsPersistWriter.class.getResourceAsStream(this.fileName));
+    this.expectedDatums = new LinkedList<StreamsDatum>();
+    while(scanner.hasNextLine()) {
+      this.expectedDatums.add(this.converter.convert(scanner.nextLine()));
     }
+    this.expectedSize = this.expectedDatums.size();
+  }
 
-    @Override
-    public void cleanUp() {
-        assertEquals("Did not received the expected number of StreamsDatums", this.expectedSize, this.counted);
-    }
+  @Override
+  public void cleanUp() {
+    assertEquals("Did not received the expected number of StreamsDatums", this.expectedSize, this.counted);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/FileReaderProvider.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/FileReaderProvider.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/FileReaderProvider.java
index 41e7eed..0fbfae9 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/FileReaderProvider.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/FileReaderProvider.java
@@ -18,10 +18,11 @@
 
 package org.apache.streams.test.component;
 
-import com.google.common.collect.Queues;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProvider;
 import org.apache.streams.core.StreamsResultSet;
+
+import com.google.common.collect.Queues;
 import org.joda.time.DateTime;
 
 import java.math.BigInteger;
@@ -37,64 +38,64 @@ import java.util.Scanner;
  */
 public class FileReaderProvider implements StreamsProvider {
 
-    private String fileName;
-    private Scanner scanner;
-    private StreamsDatumConverter converter;
-
-    public FileReaderProvider(String filePathInResources, StreamsDatumConverter converter) {
-        this.fileName = filePathInResources;
-        this.converter = converter;
-    }
-
-    @Override
-    public String getId() {
-        return "FileReaderProvider";
-    }
-
-    @Override
-    public void startStream() {
-
-    }
-
-    @Override
-    public StreamsResultSet readCurrent() {
-        return new StreamsResultSet(constructQueue(this.scanner));
-    }
-
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public boolean isRunning() {
-        return this.scanner != null && this.scanner.hasNextLine();
-    }
-
-    @Override
-    public void prepare(Object configurationObject) {
-        this.scanner = new Scanner(FileReaderProvider.class.getResourceAsStream(this.fileName));
-    }
-
-    @Override
-    public void cleanUp() {
-        if(this.scanner!= null) {
-            this.scanner.close();
-            this.scanner = null;
-        }
+  private String fileName;
+  private Scanner scanner;
+  private StreamsDatumConverter converter;
+
+  public FileReaderProvider(String filePathInResources, StreamsDatumConverter converter) {
+    this.fileName = filePathInResources;
+    this.converter = converter;
+  }
+
+  @Override
+  public String getId() {
+    return "FileReaderProvider";
+  }
+
+  @Override
+  public void startStream() {
+
+  }
+
+  @Override
+  public StreamsResultSet readCurrent() {
+    return new StreamsResultSet(constructQueue(this.scanner));
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  public boolean isRunning() {
+    return this.scanner != null && this.scanner.hasNextLine();
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    this.scanner = new Scanner(FileReaderProvider.class.getResourceAsStream(this.fileName));
+  }
+
+  @Override
+  public void cleanUp() {
+    if(this.scanner!= null) {
+      this.scanner.close();
+      this.scanner = null;
     }
+  }
 
-    private Queue<StreamsDatum> constructQueue(Scanner scanner) {
-        Queue<StreamsDatum> data = Queues.newLinkedBlockingQueue();
-        while(scanner.hasNextLine()) {
-            data.add(converter.convert(scanner.nextLine()));
-        }
-        cleanUp();
-        return data;
+  private Queue<StreamsDatum> constructQueue(Scanner scanner) {
+    Queue<StreamsDatum> data = Queues.newLinkedBlockingQueue();
+    while(scanner.hasNextLine()) {
+      data.add(converter.convert(scanner.nextLine()));
     }
+    cleanUp();
+    return data;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/StreamsDatumConverter.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/StreamsDatumConverter.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/StreamsDatumConverter.java
index e3b7dd1..9172167 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/StreamsDatumConverter.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/StreamsDatumConverter.java
@@ -27,5 +27,5 @@ import java.io.Serializable;
  */
 public interface StreamsDatumConverter extends Serializable {
 
-    public StreamsDatum convert(String s);
+  public StreamsDatum convert(String s);
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/StringToDocumentConverter.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/StringToDocumentConverter.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/StringToDocumentConverter.java
index 6f4e620..3727aa1 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/StringToDocumentConverter.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/StringToDocumentConverter.java
@@ -25,9 +25,9 @@ import org.apache.streams.core.StreamsDatum;
  */
 public class StringToDocumentConverter implements StreamsDatumConverter {
 
-    @Override
-    public StreamsDatum convert(String s) {
-        return new StreamsDatum(s);
-    }
+  @Override
+  public StreamsDatum convert(String s) {
+    return new StreamsDatum(s);
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestComponentsLocalStream.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestComponentsLocalStream.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestComponentsLocalStream.java
index 935c8fe..5154ea3 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestComponentsLocalStream.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestComponentsLocalStream.java
@@ -23,6 +23,7 @@ import org.apache.streams.test.component.ExpectedDatumsPersistWriter;
 import org.apache.streams.test.component.FileReaderProvider;
 import org.apache.streams.test.component.StringToDocumentConverter;
 import org.apache.streams.util.ComponentUtils;
+
 import org.junit.After;
 import org.junit.Test;
 
@@ -31,22 +32,22 @@ import org.junit.Test;
  */
 public class TestComponentsLocalStream {
 
-    @After
-    public void removeLocalMBeans() {
-        try {
-            ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
-        } catch (Exception e) {
-            //No op.  proceed to next test
-        }
+  @After
+  public void removeLocalMBeans() {
+    try {
+      ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
+    } catch (Exception e) {
+      //No op.  proceed to next test
     }
+  }
 
-    @Test
-    public void testLocalStreamWithComponent() {
-        LocalStreamBuilder builder = new LocalStreamBuilder();
-        builder.newReadCurrentStream("provider", new FileReaderProvider("/TestFile.txt",
-                                                                        new StringToDocumentConverter()));
-        builder.addStreamsPersistWriter("writer", new ExpectedDatumsPersistWriter(new StringToDocumentConverter(),
-                "/TestFile.txt"), 1, "provider")
+  @Test
+  public void testLocalStreamWithComponent() {
+    LocalStreamBuilder builder = new LocalStreamBuilder();
+    builder.newReadCurrentStream("provider", new FileReaderProvider("/TestFile.txt",
+        new StringToDocumentConverter()));
+    builder.addStreamsPersistWriter("writer", new ExpectedDatumsPersistWriter(new StringToDocumentConverter(),
+        "/TestFile.txt"), 1, "provider")
         .start();
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestExpectedDatumsPersitWriter.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestExpectedDatumsPersitWriter.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestExpectedDatumsPersitWriter.java
index 11e891b..0535295 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestExpectedDatumsPersitWriter.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestExpectedDatumsPersitWriter.java
@@ -22,8 +22,8 @@ import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.test.component.ExpectedDatumsPersistWriter;
 import org.apache.streams.test.component.StringToDocumentConverter;
 import org.apache.streams.util.ComponentUtils;
+
 import org.junit.After;
-import org.junit.Ignore;
 import org.junit.Test;
 
 /**
@@ -31,37 +31,37 @@ import org.junit.Test;
  */
 public class TestExpectedDatumsPersitWriter {
 
-    private static final StreamsDatum[] INPUT_DATUMS = new StreamsDatum[] {
-            new StreamsDatum("Document1"),
-            new StreamsDatum("Document2"),
-            new StreamsDatum("Document3"),
-            new StreamsDatum("Document4")
+  private static final StreamsDatum[] INPUT_DATUMS = new StreamsDatum[] {
+      new StreamsDatum("Document1"),
+      new StreamsDatum("Document2"),
+      new StreamsDatum("Document3"),
+      new StreamsDatum("Document4")
 //            Uncomment to prove failures occur, or comment out a datum above
 //            ,new StreamsDatum("Document5")
-    };
+  };
 
-    @After
-    public void removeLocalMBeans() {
-        try {
-            ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
-        } catch (Exception e) {
-            //No op.  proceed to next test
-        }
+  @After
+  public void removeLocalMBeans() {
+    try {
+      ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
+    } catch (Exception e) {
+      //No op.  proceed to next test
     }
+  }
 
-    @Test
-    public void testExpectedDatumsPersistWriterFileName() {
-        testDatums(new ExpectedDatumsPersistWriter(new StringToDocumentConverter(), "/TestFile.txt"));
-    }
+  @Test
+  public void testExpectedDatumsPersistWriterFileName() {
+    testDatums(new ExpectedDatumsPersistWriter(new StringToDocumentConverter(), "/TestFile.txt"));
+  }
 
 
 
-    private void testDatums(ExpectedDatumsPersistWriter writer) {
-        writer.prepare(null);
-        for(StreamsDatum datum : INPUT_DATUMS) {
-            writer.write(datum);
-        }
-        writer.cleanUp();
+  private void testDatums(ExpectedDatumsPersistWriter writer) {
+    writer.prepare(null);
+    for(StreamsDatum datum : INPUT_DATUMS) {
+      writer.write(datum);
     }
+    writer.cleanUp();
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestFileReaderProvider.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestFileReaderProvider.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestFileReaderProvider.java
index 1ae9a24..a2b7bba 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestFileReaderProvider.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/test/component/tests/TestFileReaderProvider.java
@@ -23,41 +23,39 @@ import org.apache.streams.core.StreamsResultSet;
 import org.apache.streams.test.component.FileReaderProvider;
 import org.apache.streams.test.component.StringToDocumentConverter;
 import org.apache.streams.util.ComponentUtils;
+
 import org.junit.After;
-import org.junit.Ignore;
 import org.junit.Test;
 
-import java.io.InputStream;
-
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 
 /**
  *
  */
 public class TestFileReaderProvider {
 
-    @After
-    public void removeLocalMBeans() {
-        try {
-            ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
-        } catch (Exception e) {
-            //No op.  proceed to next test
-        }
+  @After
+  public void removeLocalMBeans() {
+    try {
+      ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
+    } catch (Exception e) {
+      //No op.  proceed to next test
     }
+  }
 
-    @Test
-    public void testFileReaderProviderFileName() {
-        String fileName = "/TestFile.txt";
-        FileReaderProvider provider = new FileReaderProvider(fileName, new StringToDocumentConverter());
-        provider.prepare(null);
-        StreamsResultSet resultSet = provider.readCurrent();
-        int count = 0;
-        for(StreamsDatum datum : resultSet) {
-            ++count;
-        }
-        assertEquals(4, count);
-        provider.cleanUp();
+  @Test
+  public void testFileReaderProviderFileName() {
+    String fileName = "/TestFile.txt";
+    FileReaderProvider provider = new FileReaderProvider(fileName, new StringToDocumentConverter());
+    provider.prepare(null);
+    StreamsResultSet resultSet = provider.readCurrent();
+    int count = 0;
+    for(StreamsDatum datum : resultSet) {
+      ++count;
     }
+    assertEquals(4, count);
+    provider.cleanUp();
+  }
 
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsComponentFactory.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsComponentFactory.java b/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsComponentFactory.java
index 9b887af..44ade9c 100644
--- a/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsComponentFactory.java
+++ b/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsComponentFactory.java
@@ -19,54 +19,47 @@
 
 package org.apache.streams.pig;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.ArrayUtils;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.data.ActivityConverter;
-import org.apache.streams.data.ActivitySerializer;
-import org.slf4j.Logger;
 
-import java.lang.reflect.InvocationTargetException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
+import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
 
 /**
- * Static reflection wrappers for instantiating StreamsComponents
+ * Static reflection wrappers for instantiating StreamsComponents.
  */
 public class StreamsComponentFactory {
 
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(StreamsComponentFactory.class);
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(StreamsComponentFactory.class);
 
-    public static ActivityConverter getConverterInstance(Class<?> converterClazz) {
+  public static ActivityConverter getConverterInstance(Class<?> converterClazz) {
 
-        Object object = null;
-        try {
-            object = converterClazz.getConstructor().newInstance();
-        } catch (Exception e) {
-            LOGGER.error(e.getMessage());
-        }
-
-        Preconditions.checkNotNull(object);
+    Object object = null;
+    try {
+      object = converterClazz.getConstructor().newInstance();
+    } catch (Exception e) {
+      LOGGER.error(e.getMessage());
+    }
 
-        ActivityConverter converter = (ActivityConverter) object;
+    Preconditions.checkNotNull(object);
 
-        return converter;
+    ActivityConverter converter = (ActivityConverter) object;
 
-    }
+    return converter;
 
-    public static StreamsProcessor getProcessorInstance(Class<?> processorClazz) {
+  }
 
-        Object object = null;
-        try {
-            object = processorClazz.getConstructor().newInstance();
-        } catch (Exception e) {
-            LOGGER.error(e.getMessage());
-        }
-        StreamsProcessor processor = (StreamsProcessor) object;
-        return processor;
+  public static StreamsProcessor getProcessorInstance(Class<?> processorClazz) {
 
+    Object object = null;
+    try {
+      object = processorClazz.getConstructor().newInstance();
+    } catch (Exception e) {
+      LOGGER.error(e.getMessage());
     }
+    StreamsProcessor processor = (StreamsProcessor) object;
+    return processor;
+
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsPigBuilder.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsPigBuilder.java b/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsPigBuilder.java
deleted file mode 100644
index 5ff4145..0000000
--- a/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsPigBuilder.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.pig;
-
-import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.core.StreamBuilder;
-import org.apache.streams.core.StreamsPersistWriter;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.core.StreamsProvider;
-import org.joda.time.DateTime;
-
-import java.math.BigInteger;
-
-/**
- * Goal is to be able to build a pig workflow using same syntax as other
- * StreamsBuilders
- *
- * Currently implementers must write own pig scripts to use this module
- */
-public class StreamsPigBuilder implements StreamBuilder {
-
-    @Override
-    public StreamBuilder setStreamsConfiguration(StreamsConfiguration configuration) {
-        return null;
-    }
-
-    @Override
-    public StreamsConfiguration getStreamsConfiguration() {
-        return null;
-    }
-
-    @Override
-    public StreamBuilder addStreamsProcessor(String s, StreamsProcessor streamsProcessor, int i, String... strings) {
-        return null;
-    }
-
-    @Override
-    public StreamBuilder addStreamsPersistWriter(String s, StreamsPersistWriter streamsPersistWriter, int i, String... strings) {
-        return null;
-    }
-
-    @Override
-    public StreamBuilder newPerpetualStream(String s, StreamsProvider streamsProvider) {
-        return null;
-    }
-
-    @Override
-    public StreamBuilder newReadCurrentStream(String s, StreamsProvider streamsProvider) {
-        return null;
-    }
-
-    @Override
-    public StreamBuilder newReadNewStream(String s, StreamsProvider streamsProvider, BigInteger bigInteger) {
-        return null;
-    }
-
-    @Override
-    public StreamBuilder newReadRangeStream(String s, StreamsProvider streamsProvider, DateTime dateTime, DateTime dateTime2) {
-        return null;
-    }
-
-    @Override
-    public void start() {
-
-    }
-
-    @Override
-    public void stop() {
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsProcessDatumExec.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsProcessDatumExec.java b/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsProcessDatumExec.java
index 74f7eb5..cd08020 100644
--- a/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsProcessDatumExec.java
+++ b/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsProcessDatumExec.java
@@ -19,21 +19,25 @@
 
 package org.apache.streams.pig;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProcessor;
+import org.apache.streams.data.util.RFC3339Utils;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import datafu.pig.util.AliasableEvalFunc;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.pig.EvalFunc;
 import org.apache.pig.builtin.MonitoredUDF;
-import org.apache.pig.data.*;
+import org.apache.pig.data.BagFactory;
+import org.apache.pig.data.DataBag;
+import org.apache.pig.data.DataType;
+import org.apache.pig.data.Tuple;
+import org.apache.pig.data.TupleFactory;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.util.UDFContext;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.data.util.RFC3339Utils;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 
@@ -48,40 +52,40 @@ import java.util.concurrent.TimeUnit;
 @MonitoredUDF(timeUnit = TimeUnit.SECONDS, duration = 30, intDefault = 10)
 public class StreamsProcessDatumExec extends AliasableEvalFunc<DataBag> {
 
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(StreamsProcessDatumExec.class);
-
-    TupleFactory mTupleFactory = TupleFactory.getInstance();
-    BagFactory mBagFactory = BagFactory.getInstance();
-
-    StreamsProcessor streamsProcessor;
-
-    ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    public StreamsProcessDatumExec(String... execArgs) throws ClassNotFoundException{
-        Preconditions.checkNotNull(execArgs);
-        Preconditions.checkArgument(execArgs.length > 0);
-        String classFullName = execArgs[0];
-        Preconditions.checkNotNull(classFullName);
-        String[] prepareArgs = (String[]) ArrayUtils.remove(execArgs, 0);
-        streamsProcessor = StreamsComponentFactory.getProcessorInstance(Class.forName(classFullName));
-        if( execArgs.length == 1 ) {
-            LOGGER.debug("prepare (null)");
-            streamsProcessor.prepare(null);
-        } else if( execArgs.length > 1 ) {
-            LOGGER.debug("prepare " + Arrays.toString(prepareArgs));
-            streamsProcessor.prepare(prepareArgs);
-        }
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(StreamsProcessDatumExec.class);
+
+  TupleFactory mTupleFactory = TupleFactory.getInstance();
+  BagFactory mBagFactory = BagFactory.getInstance();
+
+  StreamsProcessor streamsProcessor;
+
+  ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+  public StreamsProcessDatumExec(String... execArgs) throws ClassNotFoundException{
+    Preconditions.checkNotNull(execArgs);
+    Preconditions.checkArgument(execArgs.length > 0);
+    String classFullName = execArgs[0];
+    Preconditions.checkNotNull(classFullName);
+    String[] prepareArgs = (String[]) ArrayUtils.remove(execArgs, 0);
+    streamsProcessor = StreamsComponentFactory.getProcessorInstance(Class.forName(classFullName));
+    if( execArgs.length == 1 ) {
+      LOGGER.debug("prepare (null)");
+      streamsProcessor.prepare(null);
+    } else if( execArgs.length > 1 ) {
+      LOGGER.debug("prepare " + Arrays.toString(prepareArgs));
+      streamsProcessor.prepare(prepareArgs);
     }
+  }
 
-    @Override
-    public DataBag exec(Tuple input) throws IOException {
+  @Override
+  public DataBag exec(Tuple input) throws IOException {
 
-        if (input == null || input.size() == 0)
-            return null;
+    if (input == null || input.size() == 0)
+      return null;
 
-        DataBag output = BagFactory.getInstance().newDefaultBag();
+    DataBag output = BagFactory.getInstance().newDefaultBag();
 
-        Configuration conf = UDFContext.getUDFContext().getJobConf();
+    Configuration conf = UDFContext.getUDFContext().getJobConf();
 
 //      I would prefer it work this way, but at the moment it doesn't
 
@@ -95,91 +99,91 @@ public class StreamsProcessDatumExec extends AliasableEvalFunc<DataBag> {
 //        }
 //        String object = getString(input, "object");
 
-        String id = (String) input.get(0);
-        String source = (String) input.get(1);
-        Long timestamp;
-        try {
-            timestamp = (Long) input.get(2);
-        } catch( Exception e ) {
-            timestamp = RFC3339Utils.parseUTC((String)input.get(2)).getMillis();
-        }
-        String object = (String) input.get(3);
+    String id = (String) input.get(0);
+    String source = (String) input.get(1);
+    Long timestamp;
+    try {
+      timestamp = (Long) input.get(2);
+    } catch( Exception e ) {
+      timestamp = RFC3339Utils.parseUTC((String)input.get(2)).getMillis();
+    }
+    String object = (String) input.get(3);
 
-        StreamsDatum entry = new StreamsDatum(object, id, new DateTime(timestamp));
+    StreamsDatum entry = new StreamsDatum(object, id, new DateTime(timestamp));
 
-        List<StreamsDatum> resultSet = streamsProcessor.process(entry);
-        List<Tuple> resultTupleList = Lists.newArrayList();
+    List<StreamsDatum> resultSet = streamsProcessor.process(entry);
+    List<Tuple> resultTupleList = Lists.newArrayList();
 
-        for( StreamsDatum resultDatum : resultSet ) {
-            Tuple tuple = mTupleFactory.newTuple();
-            tuple.append(id);
-            tuple.append(source);
-            tuple.append(timestamp);
+    for( StreamsDatum resultDatum : resultSet ) {
+      Tuple tuple = mTupleFactory.newTuple();
+      tuple.append(id);
+      tuple.append(source);
+      tuple.append(timestamp);
 
-            if( resultDatum.getDocument() instanceof String )
-                tuple.append(resultDatum.getDocument());
-            else
-                tuple.append(mapper.writeValueAsString(resultDatum.getDocument()));
-            resultTupleList.add(tuple);
-        }
+      if( resultDatum.getDocument() instanceof String )
+        tuple.append(resultDatum.getDocument());
+      else
+        tuple.append(mapper.writeValueAsString(resultDatum.getDocument()));
+      resultTupleList.add(tuple);
+    }
 
-        DataBag result = mBagFactory.newDefaultBag(resultTupleList);
+    DataBag result = mBagFactory.newDefaultBag(resultTupleList);
 
-        return result;
+    return result;
 
-    }
+  }
 
-    public void finish() {
-        streamsProcessor.cleanUp();
+  public void finish() {
+    streamsProcessor.cleanUp();
+  }
+
+  @Override
+  public Schema getOutputSchema(Schema schema) {
+    // Check that we were passed two fields
+    String error = "Expected: id\tsource\ttimestamp\tobject";
+    if (schema.size() != 4) {
+      throw new RuntimeException(error);
     }
 
-    @Override
-    public Schema getOutputSchema(Schema schema) {
-        // Check that we were passed two fields
-        String error = "Expected: id\tsource\ttimestamp\tobject";
-        if (schema.size() != 4) {
-            throw new RuntimeException(error);
-        }
-
-        try {
-            // Get the types for both columns and check them.  If they are
-            // wrong, figure out what types were passed and give a good error
-            // message.
-            if (schema.getField(0).type != DataType.CHARARRAY &&
-                    schema.getField(0).type != DataType.LONG) {
-                error += "Problem with id: must be CHARARRAY or LONG";
-                error += "\t(";
-                error += DataType.findTypeName(schema.getField(0).type);
-                error += ")\n";
-                throw new RuntimeException(error);
-            }
-            if (schema.getField(1).type != DataType.CHARARRAY) {
-                error += "Problem with source: must be CHARARRAY";
-                error += "\t(";
-                error += DataType.findTypeName(schema.getField(1).type);
-                error += ")\n";
-                throw new RuntimeException(error);
-            }
-            if (schema.getField(2).type != DataType.CHARARRAY &&
-                    schema.getField(2).type != DataType.LONG) {
-                error += "Problem with timestamp: must be CHARARRAY or LONG";
-                error += "\t(";
-                error += DataType.findTypeName(schema.getField(2).type);
-                error += ")\n";
-                throw new RuntimeException(error);
-            }
-            if (schema.getField(3).type != DataType.CHARARRAY) {
-                error += "Problem with object: must be CHARARRAY";
-                error += "\t(";
-                error += DataType.findTypeName(schema.getField(3).type);
-                error += ")\n";
-                throw new RuntimeException(error);
-            }
-        } catch (Exception e) {
-            throw new RuntimeException(error);
-        }
-
-        // Always hand back the same schema we are passed
-        return schema;
+    try {
+      // Get the types for both columns and check them.  If they are
+      // wrong, figure out what types were passed and give a good error
+      // message.
+      if (schema.getField(0).type != DataType.CHARARRAY &&
+          schema.getField(0).type != DataType.LONG) {
+        error += "Problem with id: must be CHARARRAY or LONG";
+        error += "\t(";
+        error += DataType.findTypeName(schema.getField(0).type);
+        error += ")\n";
+        throw new RuntimeException(error);
+      }
+      if (schema.getField(1).type != DataType.CHARARRAY) {
+        error += "Problem with source: must be CHARARRAY";
+        error += "\t(";
+        error += DataType.findTypeName(schema.getField(1).type);
+        error += ")\n";
+        throw new RuntimeException(error);
+      }
+      if (schema.getField(2).type != DataType.CHARARRAY &&
+          schema.getField(2).type != DataType.LONG) {
+        error += "Problem with timestamp: must be CHARARRAY or LONG";
+        error += "\t(";
+        error += DataType.findTypeName(schema.getField(2).type);
+        error += ")\n";
+        throw new RuntimeException(error);
+      }
+      if (schema.getField(3).type != DataType.CHARARRAY) {
+        error += "Problem with object: must be CHARARRAY";
+        error += "\t(";
+        error += DataType.findTypeName(schema.getField(3).type);
+        error += ")\n";
+        throw new RuntimeException(error);
+      }
+    } catch (Exception e) {
+      throw new RuntimeException(error);
     }
+
+    // Always hand back the same schema we are passed
+    return schema;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsProcessDocumentExec.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsProcessDocumentExec.java b/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsProcessDocumentExec.java
index 788b347..2f40923 100644
--- a/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsProcessDocumentExec.java
+++ b/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsProcessDocumentExec.java
@@ -19,25 +19,15 @@
 
 package org.apache.streams.pig;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProcessor;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import datafu.pig.util.SimpleEvalFunc;
 import org.apache.commons.lang.ArrayUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.pig.EvalFunc;
 import org.apache.pig.builtin.MonitoredUDF;
-import org.apache.pig.data.BagFactory;
-import org.apache.pig.data.DataBag;
-import org.apache.pig.data.Tuple;
-import org.apache.pig.data.TupleFactory;
-import org.apache.pig.impl.util.UDFContext;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.data.ActivitySerializer;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
 import org.slf4j.Logger;
 
 import java.io.IOException;
@@ -54,59 +44,59 @@ import java.util.concurrent.TimeUnit;
 @MonitoredUDF(timeUnit = TimeUnit.SECONDS, duration = 30, intDefault = 10)
 public class StreamsProcessDocumentExec extends SimpleEvalFunc<String> {
 
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(StreamsProcessDocumentExec.class);
-
-    StreamsProcessor streamsProcessor;
-    ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    public StreamsProcessDocumentExec(String... execArgs) throws ClassNotFoundException{
-        Preconditions.checkNotNull(execArgs);
-        Preconditions.checkArgument(execArgs.length > 0);
-        String classFullName = execArgs[0];
-        Preconditions.checkNotNull(classFullName);
-        String[] prepareArgs = (String[]) ArrayUtils.remove(execArgs, 0);
-        streamsProcessor = StreamsComponentFactory.getProcessorInstance(Class.forName(classFullName));
-        if( execArgs.length == 1 ) {
-            LOGGER.debug("prepare (null)");
-            streamsProcessor.prepare(null);
-        } else if( execArgs.length > 1 ) {
-            LOGGER.debug("prepare " + Arrays.toString(prepareArgs));
-            streamsProcessor.prepare(prepareArgs);
-        }
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(StreamsProcessDocumentExec.class);
+
+  StreamsProcessor streamsProcessor;
+  ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+  public StreamsProcessDocumentExec(String... execArgs) throws ClassNotFoundException{
+    Preconditions.checkNotNull(execArgs);
+    Preconditions.checkArgument(execArgs.length > 0);
+    String classFullName = execArgs[0];
+    Preconditions.checkNotNull(classFullName);
+    String[] prepareArgs = (String[]) ArrayUtils.remove(execArgs, 0);
+    streamsProcessor = StreamsComponentFactory.getProcessorInstance(Class.forName(classFullName));
+    if( execArgs.length == 1 ) {
+      LOGGER.debug("prepare (null)");
+      streamsProcessor.prepare(null);
+    } else if( execArgs.length > 1 ) {
+      LOGGER.debug("prepare " + Arrays.toString(prepareArgs));
+      streamsProcessor.prepare(prepareArgs);
     }
+  }
 
-    public String call(String document) throws IOException {
+  public String call(String document) throws IOException {
 
-        Preconditions.checkNotNull(streamsProcessor);
-        Preconditions.checkNotNull(document);
+    Preconditions.checkNotNull(streamsProcessor);
+    Preconditions.checkNotNull(document);
 
-        LOGGER.debug(document);
+    LOGGER.debug(document);
 
-        StreamsDatum entry = new StreamsDatum(document);
+    StreamsDatum entry = new StreamsDatum(document);
 
-        Preconditions.checkNotNull(entry);
+    Preconditions.checkNotNull(entry);
 
-        LOGGER.debug(entry.toString());
+    LOGGER.debug(entry.toString());
 
-        List<StreamsDatum> resultSet = streamsProcessor.process(entry);
+    List<StreamsDatum> resultSet = streamsProcessor.process(entry);
 
-        LOGGER.debug(resultSet.toString());
+    LOGGER.debug(resultSet.toString());
 
-        Object resultDoc = null;
-        for( StreamsDatum resultDatum : resultSet ) {
-            resultDoc = resultDatum.getDocument();
-        }
+    Object resultDoc = null;
+    for( StreamsDatum resultDatum : resultSet ) {
+      resultDoc = resultDatum.getDocument();
+    }
 
-        Preconditions.checkNotNull(resultDoc);
+    Preconditions.checkNotNull(resultDoc);
 
-        if( resultDoc instanceof String )
-            return (String) resultDoc;
-        else
-            return mapper.writeValueAsString(resultDoc);
+    if( resultDoc instanceof String )
+      return (String) resultDoc;
+    else
+      return mapper.writeValueAsString(resultDoc);
 
-    }
+  }
 
-    public void finish() {
-        streamsProcessor.cleanUp();
-    }
+  public void finish() {
+    streamsProcessor.cleanUp();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsStorage.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsStorage.java b/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsStorage.java
deleted file mode 100644
index 7692763..0000000
--- a/streams-runtimes/streams-runtime-pig/src/main/java/org/apache/streams/pig/StreamsStorage.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.pig;
-
-import org.apache.pig.builtin.PigStorage;
-
-/**
- * It would be nice if streams persisters could be used for input / output
- * within the pig runtime.
- */
-public class StreamsStorage extends PigStorage {
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/AppendStringProcessor.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/AppendStringProcessor.java b/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/AppendStringProcessor.java
index 4db38fd..a48a5e8 100644
--- a/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/AppendStringProcessor.java
+++ b/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/AppendStringProcessor.java
@@ -21,46 +21,47 @@ package org.apache.streams.pig.test;
 
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
+
 import org.slf4j.Logger;
 
 import java.util.LinkedList;
 import java.util.List;
 
 /**
- * Used to Test Pig processor wrapper with arguments to prepare method
+ * Used to Test Pig processor wrapper with arguments to prepare method.
  */
 public class AppendStringProcessor implements StreamsProcessor {
 
-    public final static String STREAMS_ID = "AppendStringProcessor";
+  public final static String STREAMS_ID = "AppendStringProcessor";
 
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(AppendStringProcessor.class);
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(AppendStringProcessor.class);
 
-    String append;
+  String append;
 
-    public AppendStringProcessor() {
-    }
+  public AppendStringProcessor() {
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        List<StreamsDatum> resultSet;
-        resultSet = new LinkedList<StreamsDatum>();
-        String value = (String) entry.getDocument()+ new String(append);
-        resultSet.add(new StreamsDatum(value));
-        return resultSet;
-    }
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    List<StreamsDatum> resultSet;
+    resultSet = new LinkedList<StreamsDatum>();
+    String value = (String) entry.getDocument()+ new String(append);
+    resultSet.add(new StreamsDatum(value));
+    return resultSet;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        append = ((String[]) configurationObject)[0];
-    }
+  @Override
+  public void prepare(Object configurationObject) {
+    append = ((String[]) configurationObject)[0];
+  }
 
-    @Override
-    public void cleanUp() {
-        LOGGER.info("Processor clean up");
-    }
+  @Override
+  public void cleanUp() {
+    LOGGER.info("Processor clean up");
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/CopyThriceProcessor.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/CopyThriceProcessor.java b/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/CopyThriceProcessor.java
index 2b687b1..5336007 100644
--- a/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/CopyThriceProcessor.java
+++ b/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/CopyThriceProcessor.java
@@ -21,46 +21,47 @@ package org.apache.streams.pig.test;
 
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
+
 import org.slf4j.Logger;
 
 import java.util.LinkedList;
 import java.util.List;
 
 /**
- * Used to Test Pig processor wrapper when multiple datums are returned
+ * Used to Test Pig processor wrapper when multiple datums are returned.
  */
 public class CopyThriceProcessor implements StreamsProcessor {
 
-    public final static String STREAMS_ID = "CopyThriceProcessor";
+  public final static String STREAMS_ID = "CopyThriceProcessor";
 
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(CopyThriceProcessor.class);
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(CopyThriceProcessor.class);
 
-    List<StreamsDatum> result;
+  List<StreamsDatum> result;
 
-    public CopyThriceProcessor() {
-    }
+  public CopyThriceProcessor() {
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        this.result = new LinkedList<StreamsDatum>();
-        result.add(entry);
-        result.add(entry);
-        result.add(entry);
-        return result;
-    }
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    this.result = new LinkedList<StreamsDatum>();
+    result.add(entry);
+    result.add(entry);
+    result.add(entry);
+    return result;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public void prepare(Object configurationObject) {
 
-    }
+  }
 
-    @Override
-    public void cleanUp() {
-        LOGGER.info("Processor clean up");
-    }
+  @Override
+  public void cleanUp() {
+    LOGGER.info("Processor clean up");
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/DoNothingProcessor.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/DoNothingProcessor.java b/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/DoNothingProcessor.java
index 5528a38..07d3b6f 100644
--- a/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/DoNothingProcessor.java
+++ b/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/DoNothingProcessor.java
@@ -21,44 +21,45 @@ package org.apache.streams.pig.test;
 
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
+
 import org.slf4j.Logger;
 
 import java.util.LinkedList;
 import java.util.List;
 
 /**
- * Used to Test Pig processor wrapper - datum passthrough
+ * Used to Test Pig processor wrapper - datum passthrough.
  */
 public class DoNothingProcessor implements StreamsProcessor {
 
-    public final static String STREAMS_ID = "DoNothingProcessor";
+  public final static String STREAMS_ID = "DoNothingProcessor";
 
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(DoNothingProcessor.class);
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(DoNothingProcessor.class);
 
-    List<StreamsDatum> result;
+  List<StreamsDatum> result;
 
-    public DoNothingProcessor() {
-    }
+  public DoNothingProcessor() {
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        this.result = new LinkedList<StreamsDatum>();
-        result.add(entry);
-        return result;
-    }
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    this.result = new LinkedList<StreamsDatum>();
+    result.add(entry);
+    return result;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        LOGGER.info("Processor prepare");
-    }
+  @Override
+  public void prepare(Object configurationObject) {
+    LOGGER.info("Processor prepare");
+  }
 
-    @Override
-    public void cleanUp() {
-        LOGGER.info("Processor clean up");
-    }
+  @Override
+  public void cleanUp() {
+    LOGGER.info("Processor clean up");
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigConverterTest.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigConverterTest.java b/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigConverterTest.java
index 5dad52c..a983cc7 100644
--- a/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigConverterTest.java
+++ b/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigConverterTest.java
@@ -19,13 +19,14 @@
 
 package org.apache.streams.pig.test;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
-import org.apache.pig.pigunit.PigTest;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.twitter.converter.TwitterDateTimeFormat;
 import org.apache.streams.twitter.converter.TwitterJsonRetweetActivityConverter;
 import org.apache.streams.twitter.pojo.Retweet;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Lists;
+import org.apache.pig.pigunit.PigTest;
 import org.apache.tools.ant.util.StringUtils;
 import org.junit.Test;
 
@@ -34,23 +35,23 @@ import org.junit.Test;
  */
 public class PigConverterTest {
 
-    @Test
-    public void testPigConverter() throws Exception {
+  @Test
+  public void testPigConverter() throws Exception {
 
-        String[] input = {
-                "159475541894897679\ttwitter,statuses/user_timeline\t1384499359006\t{\"retweeted_status\":{\"contributors\":null,\"text\":\"The Costa Concordia cruise ship accident could be a disaster for the industry | http://t.co/M9UUNvZi (via @TIMEMoneyland)\",\"geo\":null,\"retweeted\":false,\"in_reply_to_screen_name\":null,\"possibly_sensitive\":false,\"truncated\":false,\"lang\":\"en\",\"entities\":{\"symbols\":[],\"urls\":[{\"expanded_url\":\"http://ti.me/zYyEtD\",\"indices\":[80,100],\"display_url\":\"ti.me/zYyEtD\",\"url\":\"http://t.co/M9UUNvZi\"}],\"hashtags\":[],\"user_mentions\":[{\"id\":245888431,\"name\":\"TIME Moneyland\",\"indices\":[106,120],\"screen_name\":\"TIMEMoneyland\",\"id_str\":\"245888431\"}]},\"in_reply_to_status_id_str\":null,\"id\":159470076259602432,\"source\":\"<a href=\\\"http://www.hootsuite.com\\\" rel=\\\"nofollow\\\">HootSuite<\\/a>\",\"in_reply_to_user_id_str\":null,\"favorited\":false,\"in_reply_to_status_id\":null,\"retweet_count\":71,\"create
 d_at\":\"Wed Jan 18 03:00:03 +0000 2012\",\"in_reply_to_user_id\":null,\"favorite_count\":14,\"id_str\":\"159470076259602432\",\"place\":null,\"user\":{\"location\":\"\",\"default_profile\":false,\"profile_background_tile\":true,\"statuses_count\":70754,\"lang\":\"en\",\"profile_link_color\":\"1B4F89\",\"profile_banner_url\":\"https://pbs.twimg.com/profile_banners/14293310/1355243462\",\"id\":14293310,\"following\":false,\"protected\":false,\"favourites_count\":59,\"profile_text_color\":\"000000\",\"description\":\"Breaking news and current events from around the globe. Hosted by TIME staff. Tweet questions to our customer service team @TIMEmag_Service.\",\"verified\":true,\"contributors_enabled\":false,\"profile_sidebar_border_color\":\"000000\",\"name\":\"TIME.com\",\"profile_background_color\":\"CC0000\",\"created_at\":\"Thu Apr 03 13:54:30 +0000 2008\",\"default_profile_image\":false,\"followers_count\":5146268,\"profile_image_url_https\":\"https://pbs.twimg.com/profile_images/1
 700796190/Picture_24_normal.png\",\"geo_enabled\":false,\"profile_background_image_url\":\"http://a0.twimg.com/profile_background_images/735228291/107f1a300a90ee713937234bb3d139c0.jpeg\",\"profile_background_image_url_https\":\"https://si0.twimg.com/profile_background_images/735228291/107f1a300a90ee713937234bb3d139c0.jpeg\",\"follow_request_sent\":false,\"entities\":{\"description\":{\"urls\":[]},\"url\":{\"urls\":[{\"expanded_url\":\"http://www.time.com\",\"indices\":[0,22],\"display_url\":\"time.com\",\"url\":\"http://t.co/4aYbUuAeSh\"}]}},\"url\":\"http://t.co/4aYbUuAeSh\",\"utc_offset\":-18000,\"time_zone\":\"Eastern Time (US & Canada)\",\"notifications\":false,\"profile_use_background_image\":true,\"friends_count\":742,\"profile_sidebar_fill_color\":\"D9D9D9\",\"screen_name\":\"TIME\",\"id_str\":\"14293310\",\"profile_image_url\":\"http://pbs.twimg.com/profile_images/1700796190/Picture_24_normal.png\",\"listed_count\":76944,\"is_translator\":false},\"coordinates\":null},\"contr
 ibutors\":null,\"text\":\"RT @TIME: The Costa Concordia cruise ship accident could be a disaster for the industry | http://t.co/M9UUNvZi (via @TIMEMoneyland)\",\"geo\":null,\"retweeted\":false,\"in_reply_to_screen_name\":null,\"possibly_sensitive\":false,\"truncated\":false,\"lang\":\"en\",\"entities\":{\"symbols\":[],\"urls\":[{\"expanded_url\":\"http://ti.me/zYyEtD\",\"indices\":[90,110],\"display_url\":\"ti.me/zYyEtD\",\"url\":\"http://t.co/M9UUNvZi\"}],\"hashtags\":[],\"user_mentions\":[{\"id\":14293310,\"name\":\"TIME.com\",\"indices\":[3,8],\"screen_name\":\"TIME\",\"id_str\":\"14293310\"},{\"id\":245888431,\"name\":\"TIME Moneyland\",\"indices\":[116,130],\"screen_name\":\"TIMEMoneyland\",\"id_str\":\"245888431\"}]},\"in_reply_to_status_id_str\":null,\"id\":159475541894897679,\"source\":\"<a href=\\\"http://twitter.com/download/iphone\\\" rel=\\\"nofollow\\\">Twitter for iPhone<\\/a>\",\"in_reply_to_user_id_str\":null,\"favorited\":false,\"in_reply_to_status_id\":null,\"retwe
 et_count\":71,\"created_at\":\"Wed Jan 18 03:21:46 +0000 2012\",\"in_reply_to_user_id\":null,\"favorite_count\":0,\"id_str\":\"159475541894897679\",\"place\":null,\"user\":{\"location\":\"\",\"default_profile\":false,\"profile_background_tile\":true,\"statuses_count\":5053,\"lang\":\"en\",\"profile_link_color\":\"738D84\",\"id\":27552112,\"following\":false,\"protected\":false,\"favourites_count\":52,\"profile_text_color\":\"97CEC9\",\"description\":\"\",\"verified\":false,\"contributors_enabled\":false,\"profile_sidebar_border_color\":\"A9AC00\",\"name\":\"rafael medina-flores\",\"profile_background_color\":\"C5EFE3\",\"created_at\":\"Mon Mar 30 01:21:55 +0000 2009\",\"default_profile_image\":false,\"followers_count\":963,\"profile_image_url_https\":\"https://pbs.twimg.com/profile_images/2519547938/image_normal.jpg\",\"geo_enabled\":true,\"profile_background_image_url\":\"http://a0.twimg.com/profile_background_images/167479660/trireme.jpg\",\"profile_background_image_url_https\":\"
 https://si0.twimg.com/profile_background_images/167479660/trireme.jpg\",\"follow_request_sent\":false,\"entities\":{\"description\":{\"urls\":[]}},\"url\":null,\"utc_offset\":-25200,\"time_zone\":\"Mountain Time (US & Canada)\",\"notifications\":false,\"profile_use_background_image\":true,\"friends_count\":1800,\"profile_sidebar_fill_color\":\"5C4F3C\",\"screen_name\":\"rmedinaflores\",\"id_str\":\"27552112\",\"profile_image_url\":\"http://pbs.twimg.com/profile_images/2519547938/image_normal.jpg\",\"listed_count\":50,\"is_translator\":false},\"coordinates\":null}"
-        };
+    String[] input = {
+        "159475541894897679\ttwitter,statuses/user_timeline\t1384499359006\t{\"retweeted_status\":{\"contributors\":null,\"text\":\"The Costa Concordia cruise ship accident could be a disaster for the industry | http://t.co/M9UUNvZi (via @TIMEMoneyland)\",\"geo\":null,\"retweeted\":false,\"in_reply_to_screen_name\":null,\"possibly_sensitive\":false,\"truncated\":false,\"lang\":\"en\",\"entities\":{\"symbols\":[],\"urls\":[{\"expanded_url\":\"http://ti.me/zYyEtD\",\"indices\":[80,100],\"display_url\":\"ti.me/zYyEtD\",\"url\":\"http://t.co/M9UUNvZi\"}],\"hashtags\":[],\"user_mentions\":[{\"id\":245888431,\"name\":\"TIME Moneyland\",\"indices\":[106,120],\"screen_name\":\"TIMEMoneyland\",\"id_str\":\"245888431\"}]},\"in_reply_to_status_id_str\":null,\"id\":159470076259602432,\"source\":\"<a href=\\\"http://www.hootsuite.com\\\" rel=\\\"nofollow\\\">HootSuite<\\/a>\",\"in_reply_to_user_id_str\":null,\"favorited\":false,\"in_reply_to_status_id\":null,\"retweet_count\":71,\"created_at\":\
 "Wed Jan 18 03:00:03 +0000 2012\",\"in_reply_to_user_id\":null,\"favorite_count\":14,\"id_str\":\"159470076259602432\",\"place\":null,\"user\":{\"location\":\"\",\"default_profile\":false,\"profile_background_tile\":true,\"statuses_count\":70754,\"lang\":\"en\",\"profile_link_color\":\"1B4F89\",\"profile_banner_url\":\"https://pbs.twimg.com/profile_banners/14293310/1355243462\",\"id\":14293310,\"following\":false,\"protected\":false,\"favourites_count\":59,\"profile_text_color\":\"000000\",\"description\":\"Breaking news and current events from around the globe. Hosted by TIME staff. Tweet questions to our customer service team @TIMEmag_Service.\",\"verified\":true,\"contributors_enabled\":false,\"profile_sidebar_border_color\":\"000000\",\"name\":\"TIME.com\",\"profile_background_color\":\"CC0000\",\"created_at\":\"Thu Apr 03 13:54:30 +0000 2008\",\"default_profile_image\":false,\"followers_count\":5146268,\"profile_image_url_https\":\"https://pbs.twimg.com/profile_images/170079619
 0/Picture_24_normal.png\",\"geo_enabled\":false,\"profile_background_image_url\":\"http://a0.twimg.com/profile_background_images/735228291/107f1a300a90ee713937234bb3d139c0.jpeg\",\"profile_background_image_url_https\":\"https://si0.twimg.com/profile_background_images/735228291/107f1a300a90ee713937234bb3d139c0.jpeg\",\"follow_request_sent\":false,\"entities\":{\"description\":{\"urls\":[]},\"url\":{\"urls\":[{\"expanded_url\":\"http://www.time.com\",\"indices\":[0,22],\"display_url\":\"time.com\",\"url\":\"http://t.co/4aYbUuAeSh\"}]}},\"url\":\"http://t.co/4aYbUuAeSh\",\"utc_offset\":-18000,\"time_zone\":\"Eastern Time (US & Canada)\",\"notifications\":false,\"profile_use_background_image\":true,\"friends_count\":742,\"profile_sidebar_fill_color\":\"D9D9D9\",\"screen_name\":\"TIME\",\"id_str\":\"14293310\",\"profile_image_url\":\"http://pbs.twimg.com/profile_images/1700796190/Picture_24_normal.png\",\"listed_count\":76944,\"is_translator\":false},\"coordinates\":null},\"contributors\
 ":null,\"text\":\"RT @TIME: The Costa Concordia cruise ship accident could be a disaster for the industry | http://t.co/M9UUNvZi (via @TIMEMoneyland)\",\"geo\":null,\"retweeted\":false,\"in_reply_to_screen_name\":null,\"possibly_sensitive\":false,\"truncated\":false,\"lang\":\"en\",\"entities\":{\"symbols\":[],\"urls\":[{\"expanded_url\":\"http://ti.me/zYyEtD\",\"indices\":[90,110],\"display_url\":\"ti.me/zYyEtD\",\"url\":\"http://t.co/M9UUNvZi\"}],\"hashtags\":[],\"user_mentions\":[{\"id\":14293310,\"name\":\"TIME.com\",\"indices\":[3,8],\"screen_name\":\"TIME\",\"id_str\":\"14293310\"},{\"id\":245888431,\"name\":\"TIME Moneyland\",\"indices\":[116,130],\"screen_name\":\"TIMEMoneyland\",\"id_str\":\"245888431\"}]},\"in_reply_to_status_id_str\":null,\"id\":159475541894897679,\"source\":\"<a href=\\\"http://twitter.com/download/iphone\\\" rel=\\\"nofollow\\\">Twitter for iPhone<\\/a>\",\"in_reply_to_user_id_str\":null,\"favorited\":false,\"in_reply_to_status_id\":null,\"retweet_count
 \":71,\"created_at\":\"Wed Jan 18 03:21:46 +0000 2012\",\"in_reply_to_user_id\":null,\"favorite_count\":0,\"id_str\":\"159475541894897679\",\"place\":null,\"user\":{\"location\":\"\",\"default_profile\":false,\"profile_background_tile\":true,\"statuses_count\":5053,\"lang\":\"en\",\"profile_link_color\":\"738D84\",\"id\":27552112,\"following\":false,\"protected\":false,\"favourites_count\":52,\"profile_text_color\":\"97CEC9\",\"description\":\"\",\"verified\":false,\"contributors_enabled\":false,\"profile_sidebar_border_color\":\"A9AC00\",\"name\":\"rafael medina-flores\",\"profile_background_color\":\"C5EFE3\",\"created_at\":\"Mon Mar 30 01:21:55 +0000 2009\",\"default_profile_image\":false,\"followers_count\":963,\"profile_image_url_https\":\"https://pbs.twimg.com/profile_images/2519547938/image_normal.jpg\",\"geo_enabled\":true,\"profile_background_image_url\":\"http://a0.twimg.com/profile_background_images/167479660/trireme.jpg\",\"profile_background_image_url_https\":\"https://
 si0.twimg.com/profile_background_images/167479660/trireme.jpg\",\"follow_request_sent\":false,\"entities\":{\"description\":{\"urls\":[]}},\"url\":null,\"utc_offset\":-25200,\"time_zone\":\"Mountain Time (US & Canada)\",\"notifications\":false,\"profile_use_background_image\":true,\"friends_count\":1800,\"profile_sidebar_fill_color\":\"5C4F3C\",\"screen_name\":\"rmedinaflores\",\"id_str\":\"27552112\",\"profile_image_url\":\"http://pbs.twimg.com/profile_images/2519547938/image_normal.jpg\",\"listed_count\":50,\"is_translator\":false},\"coordinates\":null}"
+    };
 
-        String doc = (String) StringUtils.split(input[0], '\t').get(3);
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(new TwitterDateTimeFormat().getFormat()));
-        String outdoc = mapper.writeValueAsString(new TwitterJsonRetweetActivityConverter().toActivityList(mapper.readValue(doc, Retweet.class)).get(0));
+    String doc = (String) StringUtils.split(input[0], '\t').get(3);
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(new TwitterDateTimeFormat().getFormat()));
+    String outdoc = mapper.writeValueAsString(new TwitterJsonRetweetActivityConverter().toActivityList(mapper.readValue(doc, Retweet.class)).get(0));
 
-        String[] output = new String[1];
-        output[0] = "(159475541894897679,twitter,statuses/user_timeline,1384499359006," + outdoc + ")";
+    String[] output = new String[1];
+    output[0] = "(159475541894897679,twitter,statuses/user_timeline,1384499359006," + outdoc + ")";
 
-        PigTest test;
-        test = new PigTest("src/test/resources/pigconvertertest.pig");
-        test.assertOutput("in", input, "out", output);
+    PigTest test;
+    test = new PigTest("src/test/resources/pigconvertertest.pig");
+    test.assertOutput("in", input, "out", output);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigProcessDatumTest.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigProcessDatumTest.java b/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigProcessDatumTest.java
index 80b17b4..1cb7252 100644
--- a/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigProcessDatumTest.java
+++ b/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigProcessDatumTest.java
@@ -19,74 +19,74 @@
 
 package org.apache.streams.pig.test;
 
-import org.apache.pig.pigunit.PigTest;
 import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.twitter.converter.TwitterJsonTweetActivityConverter;
+
+import org.apache.pig.pigunit.PigTest;
 import org.apache.tools.ant.util.StringUtils;
 import org.junit.Test;
 
 import java.util.List;
 
 /**
- * These are tests for StreamsProcessDatumExec
+ * These are tests for StreamsProcessDatumExec.
  */
 public class PigProcessDatumTest {
 
-    @Test
-    public void testPigDoNothingSingleDatum() throws Exception {
-        String[] args = {};
+  @Test
+  public void testPigDoNothingSingleDatum() throws Exception {
+    String[] args = {};
 
-        String[] input = {
-                "159475541894897679\ttwitter,statuses/user_timeline\t1384499359006\t{content:\"content\",[\"a\":1,\"b\":\"c\"}",
-        };
+    String[] input = {
+        "159475541894897679\ttwitter,statuses/user_timeline\t1384499359006\t{content:\"content\",[\"a\":1,\"b\":\"c\"}",
+    };
 
-        DoNothingProcessor processor = new DoNothingProcessor();
+    DoNothingProcessor processor = new DoNothingProcessor();
 
-        String doc = (String) StringUtils.split(input[0], '\t').get(3);
-        StreamsDatum inputDatum = new StreamsDatum(doc);
-        inputDatum.setId((String) StringUtils.split(input[0], '\t').get(0));
+    String doc = (String) StringUtils.split(input[0], '\t').get(3);
+    StreamsDatum inputDatum = new StreamsDatum(doc);
+    inputDatum.setId((String) StringUtils.split(input[0], '\t').get(0));
 
-        processor.prepare(null);
+    processor.prepare(null);
 
-        StreamsDatum resultDatum = processor.process(inputDatum).get(0);
-        String resultDocument = (String) resultDatum.getDocument();
+    StreamsDatum resultDatum = processor.process(inputDatum).get(0);
+    String resultDocument = (String) resultDatum.getDocument();
 
-        String[] output = new String[1];
-        output[0] = "(159475541894897679,twitter,statuses/user_timeline,1384499359006," + resultDocument + ")";
+    String[] output = new String[1];
+    output[0] = "(159475541894897679,twitter,statuses/user_timeline,1384499359006," + resultDocument + ")";
 
-        PigTest test;
-        test = new PigTest("src/test/resources/pigprocessdatumtest.pig", args);
-        test.assertOutput("in", input, "out", output);
+    PigTest test;
+    test = new PigTest("src/test/resources/pigprocessdatumtest.pig", args);
+    test.assertOutput("in", input, "out", output);
 
-    }
+  }
 
-    @Test
-    public void testPigCopyThriceSingleDatum() throws Exception {
-        String[] args = {};
+  @Test
+  public void testPigCopyThriceSingleDatum() throws Exception {
+    String[] args = {};
 
-        String[] input = {
-                "159475541894897679\ttwitter,statuses/user_timeline\t1384499359006\t{content:\"content\",[\"a\":1,\"b\":\"c\"}",
-        };
+    String[] input = {
+        "159475541894897679\ttwitter,statuses/user_timeline\t1384499359006\t{content:\"content\",[\"a\":1,\"b\":\"c\"}",
+    };
 
-        CopyThriceProcessor processor = new CopyThriceProcessor();
+    CopyThriceProcessor processor = new CopyThriceProcessor();
 
-        String doc = (String) StringUtils.split(input[0], '\t').get(3);
-        StreamsDatum inputDatum = new StreamsDatum(doc);
-        inputDatum.setId((String) StringUtils.split(input[0], '\t').get(0));
+    String doc = (String) StringUtils.split(input[0], '\t').get(3);
+    StreamsDatum inputDatum = new StreamsDatum(doc);
+    inputDatum.setId((String) StringUtils.split(input[0], '\t').get(0));
 
-        processor.prepare(null);
+    processor.prepare(null);
 
-        List<StreamsDatum> resultSet = processor.process(inputDatum);
+    List<StreamsDatum> resultSet = processor.process(inputDatum);
 
-        String[] output = new String[resultSet.size()];
+    String[] output = new String[resultSet.size()];
 
-        for( int i = 0; i < output.length; i++ ) {
-            output[i] = "(159475541894897679,twitter,statuses/user_timeline,1384499359006," + resultSet.get(i).getDocument() + ")";
-        }
+    for( int i = 0; i < output.length; i++ ) {
+      output[i] = "(159475541894897679,twitter,statuses/user_timeline,1384499359006," + resultSet.get(i).getDocument() + ")";
+    }
 
-        PigTest test;
-        test = new PigTest("src/test/resources/pigprocessdatumcopytest.pig", args);
-        test.assertOutput("in", input, "out", output);
+    PigTest test;
+    test = new PigTest("src/test/resources/pigprocessdatumcopytest.pig", args);
+    test.assertOutput("in", input, "out", output);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigProcessDocumentTest.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigProcessDocumentTest.java b/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigProcessDocumentTest.java
index dd30eb1..2832fdc 100644
--- a/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigProcessDocumentTest.java
+++ b/streams-runtimes/streams-runtime-pig/src/test/java/org/apache/streams/pig/test/PigProcessDocumentTest.java
@@ -19,94 +19,95 @@
 
 package org.apache.streams.pig.test;
 
-import org.apache.pig.pigunit.PigTest;
 import org.apache.streams.core.StreamsDatum;
+
+import org.apache.pig.pigunit.PigTest;
 import org.apache.tools.ant.util.StringUtils;
 import org.junit.Test;
 
 /**
- * These are tests for StreamsProcessDocumentExec
+ * These are tests for StreamsProcessDocumentExec.
  */
 public class PigProcessDocumentTest {
 
-    @Test
-    public void testPigProcessEmptyDocument() throws Exception {
+  @Test
+  public void testPigProcessEmptyDocument() throws Exception {
 
-        String[] input = {
-                "159475541894897679\ttwitter,statuses/user_timeline\t1384499359006\t{}"
-        };
+    String[] input = {
+        "159475541894897679\ttwitter,statuses/user_timeline\t1384499359006\t{}"
+    };
 
-        DoNothingProcessor processor = new DoNothingProcessor();
+    DoNothingProcessor processor = new DoNothingProcessor();
 
-        String doc = (String) StringUtils.split(input[0], '\t').get(3);
-        StreamsDatum inputDatum = new StreamsDatum(doc);
-        inputDatum.setId((String) StringUtils.split(input[0], '\t').get(0));
+    String doc = (String) StringUtils.split(input[0], '\t').get(3);
+    StreamsDatum inputDatum = new StreamsDatum(doc);
+    inputDatum.setId((String) StringUtils.split(input[0], '\t').get(0));
 
-        processor.prepare(null);
+    processor.prepare(null);
 
-        StreamsDatum resultDatum = processor.process(inputDatum).get(0);
-        String resultDocument = (String) resultDatum.getDocument();
+    StreamsDatum resultDatum = processor.process(inputDatum).get(0);
+    String resultDocument = (String) resultDatum.getDocument();
 
-        String[] output = new String[1];
-        output[0] = "(159475541894897679,twitter,statuses/user_timeline,1384499359006,"+resultDocument+")";
+    String[] output = new String[1];
+    output[0] = "(159475541894897679,twitter,statuses/user_timeline,1384499359006,"+resultDocument+")";
 
-        PigTest test;
-        test = new PigTest("src/test/resources/pigprocessdocumenttest.pig");
-        test.assertOutput("in", input, "out", output);
+    PigTest test;
+    test = new PigTest("src/test/resources/pigprocessdocumenttest.pig");
+    test.assertOutput("in", input, "out", output);
 
-    }
+  }
 
-    @Test
-    public void testPigProcessJsonDocument() throws Exception {
+  @Test
+  public void testPigProcessJsonDocument() throws Exception {
 
-        String[] input = {
-                "159475541894897679\ttwitter,statuses/user_timeline\t1384499359006\t{content:\"content\",[\"a\":1,\"b\":\"c\"}"
-        };
+    String[] input = {
+        "159475541894897679\ttwitter,statuses/user_timeline\t1384499359006\t{content:\"content\",[\"a\":1,\"b\":\"c\"}"
+    };
 
-        DoNothingProcessor processor = new DoNothingProcessor();
+    DoNothingProcessor processor = new DoNothingProcessor();
 
-        String doc = (String) StringUtils.split(input[0], '\t').get(3);
-        StreamsDatum inputDatum = new StreamsDatum(doc);
-        inputDatum.setId((String) StringUtils.split(input[0], '\t').get(0));
+    String doc = (String) StringUtils.split(input[0], '\t').get(3);
+    StreamsDatum inputDatum = new StreamsDatum(doc);
+    inputDatum.setId((String) StringUtils.split(input[0], '\t').get(0));
 
-        processor.prepare(null);
+    processor.prepare(null);
 
-        StreamsDatum resultDatum = processor.process(inputDatum).get(0);
-        String resultDocument = (String) resultDatum.getDocument();
+    StreamsDatum resultDatum = processor.process(inputDatum).get(0);
+    String resultDocument = (String) resultDatum.getDocument();
 
-        String[] output = new String[1];
-        output[0] = "(159475541894897679,twitter,statuses/user_timeline,1384499359006,"+resultDocument+")";
+    String[] output = new String[1];
+    output[0] = "(159475541894897679,twitter,statuses/user_timeline,1384499359006,"+resultDocument+")";
 
-        PigTest test;
-        test = new PigTest("src/test/resources/pigprocessdocumenttest.pig");
-        test.assertOutput("in", input, "out", output);
+    PigTest test;
+    test = new PigTest("src/test/resources/pigprocessdocumenttest.pig");
+    test.assertOutput("in", input, "out", output);
 
-    }
+  }
 
-    @Test
-    public void testPigProcessAppendDocument() throws Exception {
+  @Test
+  public void testPigProcessAppendDocument() throws Exception {
 
-        String[] input = {
-                "159475541894897679\ttwitter,statuses/user_timeline\t1384499359006\thowdy"
-        };
+    String[] input = {
+        "159475541894897679\ttwitter,statuses/user_timeline\t1384499359006\thowdy"
+    };
 
-        AppendStringProcessor processor = new AppendStringProcessor();
+    AppendStringProcessor processor = new AppendStringProcessor();
 
-        String doc = (String) StringUtils.split(input[0], '\t').get(3);
-        StreamsDatum inputDatum = new StreamsDatum(doc);
-        inputDatum.setId((String) StringUtils.split(input[0], '\t').get(0));
+    String doc = (String) StringUtils.split(input[0], '\t').get(3);
+    StreamsDatum inputDatum = new StreamsDatum(doc);
+    inputDatum.setId((String) StringUtils.split(input[0], '\t').get(0));
 
-        processor.prepare(new String[]{"doody"});
+    processor.prepare(new String[]{"doody"});
 
-        StreamsDatum resultDatum = processor.process(inputDatum).get(0);
-        String resultDocument = (String) resultDatum.getDocument();
+    StreamsDatum resultDatum = processor.process(inputDatum).get(0);
+    String resultDocument = (String) resultDatum.getDocument();
 
-        String[] output = new String[1];
-        output[0] = "(159475541894897679,twitter,statuses/user_timeline,1384499359006,"+resultDocument+")";
+    String[] output = new String[1];
+    output[0] = "(159475541894897679,twitter,statuses/user_timeline,1384499359006,"+resultDocument+")";
 
-        PigTest test;
-        test = new PigTest("src/test/resources/pigprocessdocumentappendtest.pig");
-        test.assertOutput("in", input, "out", output);
+    PigTest test;
+    test = new PigTest("src/test/resources/pigprocessdocumentappendtest.pig");
+    test.assertOutput("in", input, "out", output);
 
-    }
+  }
 }



[27/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestGPlusUserActivityCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestGPlusUserActivityCollector.java b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestGPlusUserActivityCollector.java
index 677b22f..3620346 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestGPlusUserActivityCollector.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestGPlusUserActivityCollector.java
@@ -19,6 +19,12 @@
 
 package com.google.gplus.provider;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.google.gplus.configuration.UserInfo;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+import org.apache.streams.util.api.requests.backoff.impl.ConstantTimeBackOffStrategy;
+
 import com.carrotsearch.randomizedtesting.RandomizedTest;
 import com.carrotsearch.randomizedtesting.annotations.Repeat;
 import com.fasterxml.jackson.databind.DeserializationFeature;
@@ -29,11 +35,7 @@ import com.google.api.services.plus.model.Activity;
 import com.google.api.services.plus.model.ActivityFeed;
 import com.google.common.collect.Lists;
 import com.google.gplus.serializer.util.GPlusActivityDeserializer;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.google.gplus.configuration.UserInfo;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
-import org.apache.streams.util.api.requests.backoff.impl.ConstantTimeBackOffStrategy;
+
 import org.joda.time.DateTime;
 import org.junit.Test;
 import org.mockito.invocation.InvocationOnMock;
@@ -55,231 +57,256 @@ import static org.mockito.Mockito.when;
  */
 public class TestGPlusUserActivityCollector extends RandomizedTest {
 
+  private static final String ACTIVITY_TEMPLATE = "{ \"kind\": \"plus#activity\", \"etag\": \"\\\"Vea_b94Y77GDGgRK7gFNPnolKQw/v1-6aVSBGT4qiStMoz7f2_AN2fM\\\"\", \"title\": \"\", \"published\": \"%s\", \"updated\": \"2014-10-27T06:26:33.927Z\", \"id\": \"z13twrlznpvtzz52w22mdt1y0k3of1djw04\", \"url\": \"https://plus.google.com/116771159471120611293/posts/GR7CGR8N5VL\", \"actor\": { \"id\": \"116771159471120611293\", \"displayName\": \"displayName\", \"url\": \"https://plus.google.com/116771159471120611293\", \"image\": { \"url\": \"https://lh6.googleusercontent.com/-C0fiZBxdvw0/AAAAAAAAAAI/AAAAAAAAJ5k/K4pgR3_-_ms/photo.jpg?sz=50\" } }, \"verb\": \"share\", \"object\": { \"objectType\": \"activity\", \"id\": \"z13zgvtiurjgfti1v234iflghvq2c1dge04\", \"actor\": { \"id\": \"104954254300557350002\", \"displayName\": \"displayName\", \"url\": \"https://plus.google.com/104954254300557350002\", \"image\": { \"url\": \"https://lh4.googleusercontent.com/-SO1scj4p2LA/AAAAAAAAAAI/AAAAAAAAI-s/efA
 9LBVe144/photo.jpg?sz=50\" } }, \"content\": \"\", \"url\": \"https://plus.google.com/104954254300557350002/posts/AwewXhtn7ws\", \"replies\": { \"totalItems\": 0, \"selfLink\": \"https://content.googleapis.com/plus/v1/activities/z13twrlznpvtzz52w22mdt1y0k3of1djw04/comments\" }, \"plusoners\": { \"totalItems\": 9, \"selfLink\": \"https://content.googleapis.com/plus/v1/activities/z13twrlznpvtzz52w22mdt1y0k3of1djw04/people/plusoners\" }, \"resharers\": { \"totalItems\": 0, \"selfLink\": \"https://content.googleapis.com/plus/v1/activities/z13twrlznpvtzz52w22mdt1y0k3of1djw04/people/resharers\" }, \"attachments\": [ { \"objectType\": \"photo\", \"id\": \"104954254300557350002.6074732746360957410\", \"content\": \"26/10/2014 - 1\", \"url\": \"https://plus.google.com/photos/104954254300557350002/albums/6074732747132702225/6074732746360957410\", \"image\": { \"url\": \"https://lh4.googleusercontent.com/-oO3fnARlDm0/VE3JP1xHKeI/AAAAAAAAeCY/-X2jzc6HruA/w506-h750/2014%2B-%2B1\", \"type\": \"ima
 ge/jpeg\" }, \"fullImage\": { \"url\": \"https://lh4.googleusercontent.com/-oO3fnARlDm0/VE3JP1xHKeI/AAAAAAAAeCY/-X2jzc6HruA/w600-h1141/2014%2B-%2B1\", \"type\": \"image/jpeg\", \"height\": 1141, \"width\": 600 } } ] }, \"annotation\": \"Truth \U0001f61c\", \"provider\": { \"title\": \"Reshared Post\" }, \"access\": { \"kind\": \"plus#acl\", \"description\": \"Public\", \"items\": [ { \"type\": \"public\" } ] } }";
+  private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+  private static final String IN_RANGE_IDENTIFIER = "data in range";
 
-    private static final String ACTIVITY_TEMPLATE = "{ \"kind\": \"plus#activity\", \"etag\": \"\\\"Vea_b94Y77GDGgRK7gFNPnolKQw/v1-6aVSBGT4qiStMoz7f2_AN2fM\\\"\", \"title\": \"\", \"published\": \"%s\", \"updated\": \"2014-10-27T06:26:33.927Z\", \"id\": \"z13twrlznpvtzz52w22mdt1y0k3of1djw04\", \"url\": \"https://plus.google.com/116771159471120611293/posts/GR7CGR8N5VL\", \"actor\": { \"id\": \"116771159471120611293\", \"displayName\": \"displayName\", \"url\": \"https://plus.google.com/116771159471120611293\", \"image\": { \"url\": \"https://lh6.googleusercontent.com/-C0fiZBxdvw0/AAAAAAAAAAI/AAAAAAAAJ5k/K4pgR3_-_ms/photo.jpg?sz=50\" } }, \"verb\": \"share\", \"object\": { \"objectType\": \"activity\", \"id\": \"z13zgvtiurjgfti1v234iflghvq2c1dge04\", \"actor\": { \"id\": \"104954254300557350002\", \"displayName\": \"displayName\", \"url\": \"https://plus.google.com/104954254300557350002\", \"image\": { \"url\": \"https://lh4.googleusercontent.com/-SO1scj4p2LA/AAAAAAAAAAI/AAAAAAAAI-s/e
 fA9LBVe144/photo.jpg?sz=50\" } }, \"content\": \"\", \"url\": \"https://plus.google.com/104954254300557350002/posts/AwewXhtn7ws\", \"replies\": { \"totalItems\": 0, \"selfLink\": \"https://content.googleapis.com/plus/v1/activities/z13twrlznpvtzz52w22mdt1y0k3of1djw04/comments\" }, \"plusoners\": { \"totalItems\": 9, \"selfLink\": \"https://content.googleapis.com/plus/v1/activities/z13twrlznpvtzz52w22mdt1y0k3of1djw04/people/plusoners\" }, \"resharers\": { \"totalItems\": 0, \"selfLink\": \"https://content.googleapis.com/plus/v1/activities/z13twrlznpvtzz52w22mdt1y0k3of1djw04/people/resharers\" }, \"attachments\": [ { \"objectType\": \"photo\", \"id\": \"104954254300557350002.6074732746360957410\", \"content\": \"26/10/2014 - 1\", \"url\": \"https://plus.google.com/photos/104954254300557350002/albums/6074732747132702225/6074732746360957410\", \"image\": { \"url\": \"https://lh4.googleusercontent.com/-oO3fnARlDm0/VE3JP1xHKeI/AAAAAAAAeCY/-X2jzc6HruA/w506-h750/2014%2B-%2B1\", \"type\": \"i
 mage/jpeg\" }, \"fullImage\": { \"url\": \"https://lh4.googleusercontent.com/-oO3fnARlDm0/VE3JP1xHKeI/AAAAAAAAeCY/-X2jzc6HruA/w600-h1141/2014%2B-%2B1\", \"type\": \"image/jpeg\", \"height\": 1141, \"width\": 600 } } ] }, \"annotation\": \"Truth \U0001f61c\", \"provider\": { \"title\": \"Reshared Post\" }, \"access\": { \"kind\": \"plus#acl\", \"description\": \"Public\", \"items\": [ { \"type\": \"public\" } ] } }";
-    private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
-    private static final String IN_RANGE_IDENTIFIER = "data in range";
-
+  static {
+    SimpleModule simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(Activity.class, new GPlusActivityDeserializer());
+    MAPPER.registerModule(simpleModule);
+    MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+  }
 
-    static {
-        SimpleModule simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(Activity.class, new GPlusActivityDeserializer());
-        MAPPER.registerModule(simpleModule);
-        MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+  /**
+   * Creates a randomized activity and randomized date range.
+   *
+   * <p/>
+   * The activity feed is separated into three chunks,
+   * |. . . data too recent to be in date range . . .||. . . data in date range. . .||. . . data too old to be in date range|
+   * [index 0, ............................................................................................., index length-1]
+   *
+   * <p/>
+   * Inside of those chunks data has no order, but the list is ordered by those three chunks.
+   *
+   * <p/>
+   * The test will check to see if the num of data in the date range make onto the output queue.
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testWithBeforeAndAfterDates() throws InterruptedException {
+    //initialize counts assuming no date ranges will be used
+    int numActivities = randomIntBetween(0, 1000);
+    int numActivitiesInDateRange = numActivities;
+    int numberOutOfRange = 0;
+    int numBeforeRange = 0;
+    int numAfterRange = 0;
+    //determine if date ranges will be used
+    DateTime beforeDate = null;
+    DateTime afterDate = null;
+    if (randomInt() % 2 == 0) {
+      beforeDate = DateTime.now().minusDays(randomIntBetween(1,5));
     }
-
-    /**
-     * Creates a randomized activity and randomized date range.
-     * The activity feed is separated into three chunks,
-     * |. . . data too recent to be in date range . . .||. . . data in date range. . .||. . . data too old to be in date range|
-     * [index 0, ............................................................................................., index length-1]
-     * Inside of those chunks data has no order, but the list is ordered by those three chunks.
-     *
-     * The test will check to see if the num of data in the date range make onto the output queue.
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testWithBeforeAndAfterDates() throws InterruptedException {
-        //initialize counts assuming no date ranges will be used
-        int numActivities = randomIntBetween(0, 1000);
-        int numActivitiesInDateRange = numActivities;
-        int numberOutOfRange = 0;
-        int numBerforeRange = 0;
-        int numAfterRange = 0;
-        //determine if date ranges will be used
-        DateTime beforeDate = null;
-        DateTime afterDate = null;
-        if(randomInt() % 2 == 0) {
-            beforeDate = DateTime.now().minusDays(randomIntBetween(1,5));
-        }
-        if(randomInt() % 2 == 0) {
-            if(beforeDate == null) {
-                afterDate = DateTime.now().minusDays(randomIntBetween(1, 10));
-            } else {
-                afterDate = beforeDate.minusDays(randomIntBetween(1, 10));
-            }
-        }
-        //update counts if date ranges are going to be used.
-        if(beforeDate != null || afterDate != null ) { //assign amount to be in range
-            numActivitiesInDateRange = randomIntBetween(0, numActivities);
-            numberOutOfRange = numActivities - numActivitiesInDateRange;
-        }
-        if(beforeDate == null && afterDate != null) { //assign all out of range to be before the start of the range
-            numBerforeRange = numberOutOfRange;
-        } else if(beforeDate != null && afterDate == null) { //assign all out of range to be after the start of the range
-            numAfterRange = numberOutOfRange;
-        } else if(beforeDate != null && afterDate != null) { //assign half before range and half after the range
-            numAfterRange = (numberOutOfRange / 2) + (numberOutOfRange % 2);
-            numBerforeRange = numberOutOfRange / 2;
-        }
-
-        Plus plus = createMockPlus(numBerforeRange, numAfterRange, numActivitiesInDateRange, afterDate, beforeDate);
-        BackOffStrategy strategy = new ConstantTimeBackOffStrategy(1);
-        BlockingQueue<StreamsDatum> datums = new LinkedBlockingQueue<>();
-        UserInfo userInfo = new UserInfo();
-        userInfo.setUserId("A");
-        userInfo.setAfterDate(afterDate);
-        userInfo.setBeforeDate(beforeDate);
-        GPlusUserActivityCollector collector = new GPlusUserActivityCollector(plus, datums, strategy, userInfo);
-        collector.run();
-
-        assertEquals(numActivitiesInDateRange, datums.size());
-        while(!datums.isEmpty()) {
-            StreamsDatum datum = datums.take();
-            assertNotNull(datum);
-            assertNotNull(datum.getDocument());
-            assertTrue(datum.getDocument() instanceof String);
-            assertTrue(((String)datum.getDocument()).contains(IN_RANGE_IDENTIFIER)); //only in range documents are on the out going queue.
-        }
+    if (randomInt() % 2 == 0) {
+      if (beforeDate == null) {
+        afterDate = DateTime.now().minusDays(randomIntBetween(1, 10));
+      } else {
+        afterDate = beforeDate.minusDays(randomIntBetween(1, 10));
+      }
+    }
+    //update counts if date ranges are going to be used.
+    if (beforeDate != null || afterDate != null ) { //assign amount to be in range
+      numActivitiesInDateRange = randomIntBetween(0, numActivities);
+      numberOutOfRange = numActivities - numActivitiesInDateRange;
+    }
+    if (beforeDate == null && afterDate != null) { //assign all out of range to be before the start of the range
+      numBeforeRange = numberOutOfRange;
+    } else if (beforeDate != null && afterDate == null) { //assign all out of range to be after the start of the range
+      numAfterRange = numberOutOfRange;
+    } else if (beforeDate != null && afterDate != null) { //assign half before range and half after the range
+      numAfterRange = (numberOutOfRange / 2) + (numberOutOfRange % 2);
+      numBeforeRange = numberOutOfRange / 2;
     }
 
+    Plus plus = createMockPlus(numBeforeRange, numAfterRange, numActivitiesInDateRange, afterDate, beforeDate);
+    BackOffStrategy strategy = new ConstantTimeBackOffStrategy(1);
+    BlockingQueue<StreamsDatum> datums = new LinkedBlockingQueue<>();
+    UserInfo userInfo = new UserInfo();
+    userInfo.setUserId("A");
+    userInfo.setAfterDate(afterDate);
+    userInfo.setBeforeDate(beforeDate);
+    GPlusUserActivityCollector collector = new GPlusUserActivityCollector(plus, datums, strategy, userInfo);
+    collector.run();
 
-    private Plus createMockPlus(final int numBefore, final int numAfter, final int numInRange, final DateTime after, final DateTime before) {
-        Plus plus = mock(Plus.class);
-        final Plus.Activities activities = createMockPlusActivities(numBefore, numAfter, numInRange, after, before);
-        doAnswer(new Answer() {
-            @Override
-            public Plus.Activities answer(InvocationOnMock invocationOnMock) throws Throwable {
-                return activities;
-            }
-        }).when(plus).activities();
-        return plus;
+    assertEquals(numActivitiesInDateRange, datums.size());
+    while (!datums.isEmpty()) {
+      StreamsDatum datum = datums.take();
+      assertNotNull(datum);
+      assertNotNull(datum.getDocument());
+      assertTrue(datum.getDocument() instanceof String);
+      assertTrue(((String)datum.getDocument()).contains(IN_RANGE_IDENTIFIER)); //only in range documents are on the out going queue.
     }
+  }
 
-    private Plus.Activities createMockPlusActivities(final int numBefore, final int numAfter, final int numInRange, final DateTime after, final DateTime before) {
-        Plus.Activities activities = mock(Plus.Activities.class);
-        try {
-            Plus.Activities.List list = createMockPlusActivitiesList(numBefore, numAfter, numInRange, after, before);
-            when(activities.list(anyString(), anyString())).thenReturn(list);
-        } catch (IOException ioe) {
-            fail("Should not have thrown exception while creating mock. : "+ioe.getMessage());
-        }
+
+  private Plus createMockPlus(final int numBefore, final int numAfter, final int numInRange, final DateTime after, final DateTime before) {
+    Plus plus = mock(Plus.class);
+    final Plus.Activities activities = createMockPlusActivities(numBefore, numAfter, numInRange, after, before);
+    doAnswer(new Answer() {
+      @Override
+      public Plus.Activities answer(InvocationOnMock invocationOnMock) throws Throwable {
         return activities;
+      }
+    }).when(plus).activities();
+    return plus;
+  }
+
+  private Plus.Activities createMockPlusActivities(
+      final int numBefore,
+      final int numAfter,
+      final int numInRange,
+      final DateTime after,
+      final DateTime before) {
+    Plus.Activities activities = mock(Plus.Activities.class);
+    try {
+      Plus.Activities.List list = createMockPlusActivitiesList(numBefore, numAfter, numInRange, after, before);
+      when(activities.list(anyString(), anyString())).thenReturn(list);
+    } catch (IOException ioe) {
+      fail("Should not have thrown exception while creating mock. : " + ioe.getMessage());
     }
+    return activities;
+  }
 
-    private Plus.Activities.List createMockPlusActivitiesList(final int numBefore, final int numAfter, final int numInRange, final DateTime after, final DateTime before) {
-        Plus.Activities.List list = mock(Plus.Activities.List.class);
-        when(list.setMaxResults(anyLong())).thenReturn(list);
-        when(list.setPageToken(anyString())).thenReturn(list);
-        ActivityFeedAnswer answer = new ActivityFeedAnswer(numBefore, numAfter, numInRange, after, before);
-        try {
-            doAnswer(answer).when(list).execute();
-        } catch (IOException ioe) {
-            fail("Should not have thrown exception while creating mock. : "+ioe.getMessage());
-        }
-        return list;
+  private Plus.Activities.List createMockPlusActivitiesList(
+      final int numBefore,
+      final int numAfter,
+      final int numInRange,
+      final DateTime after,
+      final DateTime before) {
+    Plus.Activities.List list = mock(Plus.Activities.List.class);
+    when(list.setMaxResults(anyLong())).thenReturn(list);
+    when(list.setPageToken(anyString())).thenReturn(list);
+    ActivityFeedAnswer answer = new ActivityFeedAnswer(numBefore, numAfter, numInRange, after, before);
+    try {
+      doAnswer(answer).when(list).execute();
+    } catch (IOException ioe) {
+      fail("Should not have thrown exception while creating mock. : " + ioe.getMessage());
     }
+    return list;
+  }
 
 
-    private static ActivityFeed createMockActivityFeed(int numBefore, int numAfter, int numInRange,  DateTime after, DateTime before, boolean page) {
-        ActivityFeed feed = new ActivityFeed();
-        List<Activity> list = Lists.newLinkedList();
-        for(int i=0; i < numAfter; ++i) {
-            DateTime published = before.plus(randomIntBetween(0, Integer.MAX_VALUE));
-            Activity activity = createActivityWithPublishedDate(published);
-            list.add(activity);
-        }
-        for(int i=0; i < numInRange; ++i) {
-            DateTime published = null;
-            if((before == null && after == null) || before == null) {
-                published = DateTime.now(); // no date range or end time date range so just make the time now.
-            } else if(after == null) {
-                published = before.minusMillis(randomIntBetween(1, Integer.MAX_VALUE)); //no beginning to range
-            } else { // has to be in range
-                long range = before.getMillis() - after.getMillis();
-                published = after.plus(range / 2); //in the middle
-            }
-            Activity activity = createActivityWithPublishedDate(published);
-            activity.setTitle(IN_RANGE_IDENTIFIER);
-            list.add(activity);
-        }
-        for(int i=0; i < numBefore; ++i) {
-            DateTime published = after.minusMillis(randomIntBetween(1, Integer.MAX_VALUE));
-            Activity activity = createActivityWithPublishedDate(published);
-            list.add(activity);
-        }
-        if(page) {
-            feed.setNextPageToken("A");
-        } else {
-            feed.setNextPageToken(null);
-        }
-        feed.setItems(list);
-        return feed;
+  private static ActivityFeed createMockActivityFeed(
+      int numBefore,
+      int numAfter,
+      int numInRange,
+      DateTime after,
+      DateTime before,
+      boolean page) {
+    ActivityFeed feed = new ActivityFeed();
+    List<Activity> list = Lists.newLinkedList();
+    for (int i = 0; i < numAfter; ++i) {
+      DateTime published = before.plus(randomIntBetween(0, Integer.MAX_VALUE));
+      Activity activity = createActivityWithPublishedDate(published);
+      list.add(activity);
     }
-
-    private static Activity createActivityWithPublishedDate(DateTime dateTime) {
-        Activity activity = new Activity();
-        activity.setPublished(new com.google.api.client.util.DateTime(dateTime.getMillis()));
-        activity.setId("a");
-        return activity;
+    for (int i = 0; i < numInRange; ++i) {
+      DateTime published = null;
+      if ((before == null && after == null) || before == null) {
+        published = DateTime.now(); // no date range or end time date range so just make the time now.
+      } else if (after == null) {
+        published = before.minusMillis(randomIntBetween(1, Integer.MAX_VALUE)); //no beginning to range
+      } else { // has to be in range
+        long range = before.getMillis() - after.getMillis();
+        published = after.plus(range / 2); //in the middle
+      }
+      Activity activity = createActivityWithPublishedDate(published);
+      activity.setTitle(IN_RANGE_IDENTIFIER);
+      list.add(activity);
     }
+    for (int i = 0; i < numBefore; ++i) {
+      DateTime published = after.minusMillis(randomIntBetween(1, Integer.MAX_VALUE));
+      Activity activity = createActivityWithPublishedDate(published);
+      list.add(activity);
+    }
+    if (page) {
+      feed.setNextPageToken("A");
+    } else {
+      feed.setNextPageToken(null);
+    }
+    feed.setItems(list);
+    return feed;
+  }
 
-    private static class ActivityFeedAnswer implements Answer<ActivityFeed> {
-        private int afterCount = 0;
-        private int beforeCount = 0;
-        private int inCount = 0;
-        private int maxBatch = 100;
+  private static Activity createActivityWithPublishedDate(DateTime dateTime) {
+    Activity activity = new Activity();
+    activity.setPublished(new com.google.api.client.util.DateTime(dateTime.getMillis()));
+    activity.setId("a");
+    return activity;
+  }
 
-        private int numAfter;
-        private int numInRange;
-        private int numBefore;
-        private DateTime after;
-        private DateTime before;
+  private static class ActivityFeedAnswer implements Answer<ActivityFeed> {
+    private int afterCount = 0;
+    private int beforeCount = 0;
+    private int inCount = 0;
+    private int maxBatch = 100;
 
-        private ActivityFeedAnswer(int numBefore, int numAfter, int numInRange, DateTime after, DateTime before) {
-            this.numBefore = numBefore;
-            this.numAfter = numAfter;
-            this.numInRange = numInRange;
-            this.after = after;
-            this.before = before;
-        }
+    private int numAfter;
+    private int numInRange;
+    private int numBefore;
+    private DateTime after;
+    private DateTime before;
 
+    private ActivityFeedAnswer(int numBefore, int numAfter, int numInRange, DateTime after, DateTime before) {
+      this.numBefore = numBefore;
+      this.numAfter = numAfter;
+      this.numInRange = numInRange;
+      this.after = after;
+      this.before = before;
+    }
 
 
 
-        @Override
-        public ActivityFeed answer(InvocationOnMock invocationOnMock) throws Throwable {
-            int totalCount = 0;
-            int batchAfter = 0;
-            int batchBefore = 0;
-            int batchIn = 0;
-            if(afterCount != numAfter) {
-                if(numAfter - afterCount >= maxBatch) {
-                    afterCount += maxBatch;
-                    batchAfter += maxBatch;
-                    totalCount += batchAfter;
-                } else {
-                    batchAfter += numAfter - afterCount;
-                    totalCount += numAfter - afterCount;
-                    afterCount = numAfter;
-                }
-            }
-            if(totalCount < maxBatch && inCount != numInRange) {
-                if(numInRange - inCount >= maxBatch - totalCount) {
-                    inCount += maxBatch - totalCount;
-                    batchIn += maxBatch - totalCount;
-                    totalCount += batchIn;
-                } else {
-                    batchIn += numInRange - inCount;
-                    totalCount += numInRange - inCount;
-                    inCount = numInRange;
-                }
-            }
-            if(totalCount < maxBatch && beforeCount != numBefore) {
-                if(numBefore - batchBefore >= maxBatch - totalCount) {
-                    batchBefore += maxBatch - totalCount;
-                    totalCount = maxBatch;
-                    beforeCount +=batchBefore;
-                } else {
-                    batchBefore += numBefore - beforeCount;
-                    totalCount += numBefore - beforeCount;
-                    beforeCount = numBefore;
-                }
-            }
 
-            return createMockActivityFeed(batchBefore, batchAfter, batchIn, after, before, numAfter != afterCount || inCount != numInRange || beforeCount != numBefore);
+    @Override
+    public ActivityFeed answer(InvocationOnMock invocationOnMock) throws Throwable {
+      int totalCount = 0;
+      int batchAfter = 0;
+      int batchBefore = 0;
+      int batchIn = 0;
+      if (afterCount != numAfter) {
+        if (numAfter - afterCount >= maxBatch) {
+          afterCount += maxBatch;
+          batchAfter += maxBatch;
+          totalCount += batchAfter;
+        } else {
+          batchAfter += numAfter - afterCount;
+          totalCount += numAfter - afterCount;
+          afterCount = numAfter;
         }
+      }
+      if (totalCount < maxBatch && inCount != numInRange) {
+        if (numInRange - inCount >= maxBatch - totalCount) {
+          inCount += maxBatch - totalCount;
+          batchIn += maxBatch - totalCount;
+          totalCount += batchIn;
+        } else {
+          batchIn += numInRange - inCount;
+          totalCount += numInRange - inCount;
+          inCount = numInRange;
+        }
+      }
+      if (totalCount < maxBatch && beforeCount != numBefore) {
+        if (numBefore - batchBefore >= maxBatch - totalCount) {
+          batchBefore += maxBatch - totalCount;
+          totalCount = maxBatch;
+          beforeCount += batchBefore;
+        } else {
+          batchBefore += numBefore - beforeCount;
+          totalCount += numBefore - beforeCount;
+          beforeCount = numBefore;
+        }
+      }
+
+      return createMockActivityFeed(
+          batchBefore,
+          batchAfter,
+          batchIn,
+          after,
+          before,
+          numAfter != afterCount || inCount != numInRange || beforeCount != numBefore);
     }
+  }
 
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestGPlusUserDataCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestGPlusUserDataCollector.java b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestGPlusUserDataCollector.java
index 1251b9a..4460fb1 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestGPlusUserDataCollector.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/provider/TestGPlusUserDataCollector.java
@@ -19,13 +19,15 @@
 
 package com.google.gplus.provider;
 
-import com.google.api.client.googleapis.json.GoogleJsonResponseException;
-import com.google.api.services.plus.Plus;
-import com.google.api.services.plus.model.Person;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.google.gplus.configuration.UserInfo;
 import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
 import org.apache.streams.util.api.requests.backoff.impl.ConstantTimeBackOffStrategy;
+
+import com.google.api.client.googleapis.json.GoogleJsonResponseException;
+import com.google.api.services.plus.Plus;
+import com.google.api.services.plus.model.Person;
+
 import org.junit.Test;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
@@ -40,110 +42,106 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 /**
- * Basic Units for {@link com.google.gplus.provider.GPlusUserDataCollector}
+ * Basic unit tests for {@link com.google.gplus.provider.GPlusUserDataCollector}.
  */
 public class TestGPlusUserDataCollector {
 
-    private static final String NO_ERROR = "no error";
-
-
-    /**
-     * Test that on success a datum will be added to the queue.
-     * @throws Exception
-     */
-    @Test
-    public void testSucessfullPull() throws Exception {
-        Plus plus = createMockPlus(0, null);
-        BackOffStrategy backOff = new ConstantTimeBackOffStrategy(1);
-        BlockingQueue<StreamsDatum> datums = new LinkedBlockingQueue<>();
-        UserInfo user = new UserInfo();
-        user.setUserId("A");
-
-        GPlusUserDataCollector collector = new GPlusUserDataCollector(plus, backOff, datums, user);
-        collector.run();
-
-        assertEquals(1, datums.size());
-        StreamsDatum datum = datums.take();
-        assertNotNull(datum);
-        assertEquals(NO_ERROR, datum.getId());
-        assertNotNull(datum.getDocument());
-        assertTrue(datum.getDocument() instanceof String);
-    }
-
-    /**
-     * Test that on failure, no datums are output
-     * @throws Exception
-     */
-    @Test
-    public void testFail() throws Exception {
-        Plus plus = createMockPlus(3, mock(GoogleJsonResponseException.class));
-        UserInfo user = new UserInfo();
-        user.setUserId("A");
-        BlockingQueue<StreamsDatum> datums = new LinkedBlockingQueue<>();
-        BackOffStrategy backOffStrategy = new ConstantTimeBackOffStrategy(1);
-
-        GPlusUserDataCollector collector = new GPlusUserDataCollector(plus, backOffStrategy, datums, user);
-        collector.run();
-
-        assertEquals(0, datums.size());
-    }
-
-
-
-    private Plus createMockPlus(final int succedOnTry, final Throwable throwable) {
-        Plus plus = mock(Plus.class);
-        doAnswer(new Answer() {
-            @Override
-            public Plus.People answer(InvocationOnMock invocationOnMock) throws Throwable {
-                return createMockPeople(succedOnTry, throwable);
-            }
-        }).when(plus).people();
-        return plus;
-    }
-
-    private Plus.People createMockPeople(final int succedOnTry, final Throwable throwable) {
-        Plus.People people = mock(Plus.People.class);
-        try {
-            when(people.get(anyString())).thenAnswer(new Answer<Plus.People.Get>() {
-                @Override
-                public Plus.People.Get answer(InvocationOnMock invocationOnMock) throws Throwable {
-                    return createMockGetNoError(succedOnTry, throwable);
-                }
-            });
-        } catch (IOException ioe) {
-            fail("No Excpetion should have been thrown while creating mocks");
+  private static final String NO_ERROR = "no error";
+
+  /**
+   * Test that on success a datum will be added to the queue.
+   * @throws Exception if an unexpected error occurs while running the test
+   */
+  @Test
+  public void testSucessfullPull() throws Exception {
+    Plus plus = createMockPlus(0, null);
+    BackOffStrategy backOff = new ConstantTimeBackOffStrategy(1);
+    BlockingQueue<StreamsDatum> datums = new LinkedBlockingQueue<>();
+    UserInfo user = new UserInfo();
+    user.setUserId("A");
+
+    GPlusUserDataCollector collector = new GPlusUserDataCollector(plus, backOff, datums, user);
+    collector.run();
+
+    assertEquals(1, datums.size());
+    StreamsDatum datum = datums.take();
+    assertNotNull(datum);
+    assertEquals(NO_ERROR, datum.getId());
+    assertNotNull(datum.getDocument());
+    assertTrue(datum.getDocument() instanceof String);
+  }
+
+  /**
+   * Test that on failure, no datums are output.
+   * @throws Exception if an unexpected error occurs while running the test
+   */
+  @Test
+  public void testFail() throws Exception {
+    Plus plus = createMockPlus(3, mock(GoogleJsonResponseException.class));
+    UserInfo user = new UserInfo();
+    user.setUserId("A");
+    BlockingQueue<StreamsDatum> datums = new LinkedBlockingQueue<>();
+    BackOffStrategy backOffStrategy = new ConstantTimeBackOffStrategy(1);
+
+    GPlusUserDataCollector collector = new GPlusUserDataCollector(plus, backOffStrategy, datums, user);
+    collector.run();
+
+    assertEquals(0, datums.size());
+  }
+
+  private Plus createMockPlus(final int succedOnTry, final Throwable throwable) {
+    Plus plus = mock(Plus.class);
+    doAnswer(new Answer() {
+      @Override
+      public Plus.People answer(InvocationOnMock invocationOnMock) throws Throwable {
+        return createMockPeople(succedOnTry, throwable);
+      }
+    }).when(plus).people();
+    return plus;
+  }
+
+  private Plus.People createMockPeople(final int succedOnTry, final Throwable throwable) {
+    Plus.People people = mock(Plus.People.class);
+    try {
+      when(people.get(anyString())).thenAnswer(new Answer<Plus.People.Get>() {
+        @Override
+        public Plus.People.Get answer(InvocationOnMock invocationOnMock) throws Throwable {
+          return createMockGetNoError(succedOnTry, throwable);
         }
-        return people;
+      });
+    } catch (IOException ioe) {
+      fail("No Excpetion should have been thrown while creating mocks");
     }
-
-    private Plus.People.Get createMockGetNoError(final int succedOnTry, final Throwable throwable) {
-        Plus.People.Get get = mock(Plus.People.Get.class);
-        try {
-            doAnswer(new Answer() {
-                private int counter =0;
-
-                @Override
-                public Person answer(InvocationOnMock invocationOnMock) throws Throwable {
-                    if(counter == succedOnTry) {
-                        Person p = new Person();
-                        p.setId(NO_ERROR);
-                        return p;
-                    } else {
-                        ++counter;
-                        throw throwable;
-                    }
-                }
-            }).when(get).execute();
-        } catch (IOException ioe) {
-            fail("No Excpetion should have been thrown while creating mocks");
+    return people;
+  }
+
+  private Plus.People.Get createMockGetNoError(final int succedOnTry, final Throwable throwable) {
+    Plus.People.Get get = mock(Plus.People.Get.class);
+    try {
+      doAnswer(new Answer() {
+        private int counter = 0;
+
+        @Override
+        public Person answer(InvocationOnMock invocationOnMock) throws Throwable {
+          if (counter == succedOnTry) {
+            Person person = new Person();
+            person.setId(NO_ERROR);
+            return person;
+          } else {
+            ++counter;
+            throw throwable;
+          }
         }
-        return get;
+      }).when(get).execute();
+    } catch (IOException ioe) {
+      fail("No Excpetion should have been thrown while creating mocks");
     }
+    return get;
+  }
 
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/serializer/util/GPlusEventClassifierTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/serializer/util/GPlusEventClassifierTest.java b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/serializer/util/GPlusEventClassifierTest.java
index 8b4c29b..96a9d89 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/serializer/util/GPlusEventClassifierTest.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/test/java/com/google/gplus/serializer/util/GPlusEventClassifierTest.java
@@ -18,53 +18,58 @@
 
 package com.google.gplus.serializer.util;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.api.services.plus.model.Activity;
 import com.google.api.services.plus.model.Person;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Unit tests for {@link GPlusEventClassifier} event-type detection.
+ */
 public class GPlusEventClassifierTest {
-    private static StreamsJacksonMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Test
-    public void classifyActivityTest() {
-        try {
-            Activity activity = new Activity();
-            activity.setKind("plus#activity");
-            Class retClass = GPlusEventClassifier.detectClass(mapper.writeValueAsString(activity));
+  private static StreamsJacksonMapper mapper = StreamsJacksonMapper.getInstance();
+
+  @Test
+  public void classifyActivityTest() {
+    try {
+      Activity activity = new Activity();
+      activity.setKind("plus#activity");
+      Class retClass = GPlusEventClassifier.detectClass(mapper.writeValueAsString(activity));
 
-            assertEquals(retClass, Activity.class);
-        } catch(Exception e) {
-            //
-        }
+      assertEquals(retClass, Activity.class);
+    } catch (Exception ex) {
+      //
     }
+  }
 
-    @Test
-    public void classifyPersonTest() {
-        try {
-            Person person = new Person();
-            person.setKind("plus#person");
-            Class retClass = GPlusEventClassifier.detectClass(mapper.writeValueAsString(person));
+  @Test
+  public void classifyPersonTest() {
+    try {
+      Person person = new Person();
+      person.setKind("plus#person");
+      Class retClass = GPlusEventClassifier.detectClass(mapper.writeValueAsString(person));
 
-            assertEquals(retClass, Person.class);
-        } catch(Exception e) {
-            //
-        }
+      assertEquals(retClass, Person.class);
+    } catch (Exception ex) {
+      //
     }
+  }
 
-    @Test
-    public void classifObjectNodeTest() {
-        try {
-            Person person = new Person();
-            person.setKind("fake");
-            Class retClass = GPlusEventClassifier.detectClass(mapper.writeValueAsString(person));
+  @Test
+  public void classifyObjectNodeTest() {
+    try {
+      Person person = new Person();
+      person.setKind("fake");
+      Class retClass = GPlusEventClassifier.detectClass(mapper.writeValueAsString(person));
 
-            assertEquals(retClass, ObjectNode.class);
-        } catch(Exception e) {
-            //
-        }
+      assertEquals(retClass, ObjectNode.class);
+    } catch (Exception ex) {
+      //
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/test/java/org/apache/streams/gplus/test/providers/GPlusUserActivityProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/test/java/org/apache/streams/gplus/test/providers/GPlusUserActivityProviderIT.java b/streams-contrib/streams-provider-google/google-gplus/src/test/java/org/apache/streams/gplus/test/providers/GPlusUserActivityProviderIT.java
index 51caa64..4b642ab 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/test/java/org/apache/streams/gplus/test/providers/GPlusUserActivityProviderIT.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/test/java/org/apache/streams/gplus/test/providers/GPlusUserActivityProviderIT.java
@@ -29,39 +29,39 @@ import java.io.LineNumberReader;
 
 public class GPlusUserActivityProviderIT {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(GPlusUserActivityProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(GPlusUserActivityProviderIT.class);
 
-    @Test
-    public void testGPlusUserActivityProvider() throws Exception {
+  @Test
+  public void testGPlusUserActivityProvider() throws Exception {
 
-        String configfile = "./target/test-classes/GPlusUserActivityProviderIT.conf";
-        String outfile = "./target/test-classes/GPlusUserActivityProviderIT.stdout.txt";
+    String configfile = "./target/test-classes/GPlusUserActivityProviderIT.conf";
+    String outfile = "./target/test-classes/GPlusUserActivityProviderIT.stdout.txt";
 
-        String[] args = new String[2];
-        args[0] = configfile;
-        args[1] = outfile;
+    String[] args = new String[2];
+    args[0] = configfile;
+    args[1] = outfile;
 
-        Thread testThread = new Thread((Runnable) () -> {
-            try {
-                GPlusUserActivityProvider.main(args);
-            } catch( Exception e ) {
-                LOGGER.error("Test Exception!", e);
-            }
-        });
-        testThread.start();
-        testThread.join(60000);
+    Thread testThread = new Thread((Runnable) () -> {
+      try {
+        GPlusUserActivityProvider.main(args);
+      } catch ( Exception ex ) {
+        LOGGER.error("Test Exception!", ex);
+      }
+    });
+    testThread.start();
+    testThread.join(60000);
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while (outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() >= 1);
+    assert (outCounter.getLineNumber() >= 1);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/test/java/org/apache/streams/gplus/test/providers/GPlusUserDataProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/test/java/org/apache/streams/gplus/test/providers/GPlusUserDataProviderIT.java b/streams-contrib/streams-provider-google/google-gplus/src/test/java/org/apache/streams/gplus/test/providers/GPlusUserDataProviderIT.java
index b367baa..fd4ddd5 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/test/java/org/apache/streams/gplus/test/providers/GPlusUserDataProviderIT.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/test/java/org/apache/streams/gplus/test/providers/GPlusUserDataProviderIT.java
@@ -30,41 +30,41 @@ import java.io.LineNumberReader;
 
 public class GPlusUserDataProviderIT {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(GPlusUserDataProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(GPlusUserDataProviderIT.class);
 
-    @Test
-    public void testGPlusUserDataProvider() throws Exception {
+  @Test
+  public void testGPlusUserDataProvider() throws Exception {
 
-        String configfile = "./target/test-classes/GPlusUserDataProviderIT.conf";
-        String outfile = "./target/test-classes/GPlusUserDataProviderIT.stdout.txt";
+    String configfile = "./target/test-classes/GPlusUserDataProviderIT.conf";
+    String outfile = "./target/test-classes/GPlusUserDataProviderIT.stdout.txt";
 
-        String[] args = new String[2];
-        args[0] = configfile;
-        args[1] = outfile;
+    String[] args = new String[2];
+    args[0] = configfile;
+    args[1] = outfile;
 
-        Thread testThread = new Thread((Runnable) () -> {
-            try {
-                GPlusUserDataProvider.main(args);
-            } catch( Exception e ) {
-                LOGGER.error("Test Exception!", e);
-            }
-        });
-        testThread.start();
-        testThread.join(60000);
+    Thread testThread = new Thread((Runnable) () -> {
+      try {
+        GPlusUserDataProvider.main(args);
+      } catch ( Exception ex ) {
+        LOGGER.error("Test Exception!", ex);
+      }
+    });
+    testThread.start();
+    testThread.join(60000);
 
-        GPlusUserDataProvider.main(Lists.newArrayList(configfile, outfile).toArray(new String[2]));
+    GPlusUserDataProvider.main(Lists.newArrayList(configfile, outfile).toArray(new String[2]));
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while (outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() >= 1);
+    assert (outCounter.getLineNumber() >= 1);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/processor/InstagramTypeConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/processor/InstagramTypeConverter.java b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/processor/InstagramTypeConverter.java
index 17af5f6..6fd6b4e 100644
--- a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/processor/InstagramTypeConverter.java
+++ b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/processor/InstagramTypeConverter.java
@@ -18,88 +18,91 @@
 
 package org.apache.streams.instagram.processor;
 
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.instagram.serializer.InstagramMediaFeedDataConverter;
 import org.apache.streams.instagram.serializer.InstagramUserInfoDataConverter;
-import org.apache.streams.instagram.serializer.util.InstagramActivityUtil;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
+
+import com.google.common.collect.Lists;
 import org.jinstagram.entity.users.basicinfo.UserInfoData;
 import org.jinstagram.entity.users.feed.MediaFeedData;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.List;
-import java.util.Queue;
 
 /**
- * This is deprecated - use ActivityConverterProcessor or ActivityObjectConverterProcessor
+ * This is deprecated - use ActivityConverterProcessor or ActivityObjectConverterProcessor.
  */
 @Deprecated
 public class InstagramTypeConverter implements StreamsProcessor {
 
-    public final static String STREAMS_ID = "InstagramTypeConverter";
+  public static final String STREAMS_ID = "InstagramTypeConverter";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(InstagramTypeConverter.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramTypeConverter.class);
 
-    private InstagramMediaFeedDataConverter mediaFeedDataConverter;
-    private InstagramUserInfoDataConverter userInfoDataConverter;
+  private InstagramMediaFeedDataConverter mediaFeedDataConverter;
+  private InstagramUserInfoDataConverter userInfoDataConverter;
 
-    public final static String TERMINATE = new String("TERMINATE");
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public static final String TERMINATE = new String("TERMINATE");
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        StreamsDatum result = null;
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
 
-        try {
-            Object item = entry.getDocument();
+    StreamsDatum result = null;
 
-            LOGGER.debug("{} processing {}", STREAMS_ID, item.getClass());
-            if(item instanceof MediaFeedData) {
+    try {
+      Object item = entry.getDocument();
 
-                //We don't need to use the mapper, since we have a process to convert between
-                //MediaFeedData objects and Activity objects already
-                List<Activity> activity = mediaFeedDataConverter.toActivityList((MediaFeedData)item);
+      LOGGER.debug("{} processing {}", STREAMS_ID, item.getClass());
+      if (item instanceof MediaFeedData) {
 
-                if( activity.size() > 0 ) result = new StreamsDatum(activity);
+        // We don't need to use the mapper, since we already have a process to convert
+        // between MediaFeedData objects and Activity objects.
+        List<Activity> activity = mediaFeedDataConverter.toActivityList((MediaFeedData)item);
 
-            } else if(item instanceof UserInfoData) {
-
-                ActivityObject activityObject = userInfoDataConverter.toActivityObject((UserInfoData)item);
+        if ( activity.size() > 0 ) {
+          result = new StreamsDatum(activity);
+        }
 
-                if( activityObject != null ) result = new StreamsDatum(activityObject);
+      } else if (item instanceof UserInfoData) {
 
-            }
+        ActivityObject activityObject = userInfoDataConverter.toActivityObject((UserInfoData)item);
 
-        } catch (Exception e) {
-            e.printStackTrace();
-            LOGGER.error("Exception while converting item: {}", e.getMessage());
+        if ( activityObject != null ) {
+          result = new StreamsDatum(activityObject);
         }
 
-        if( result != null ) {
-            return Lists.newArrayList(result);
-        } else
-            return Lists.newArrayList();
-
-    }
+      }
 
-    @Override
-    public void prepare(Object o) {
-        mediaFeedDataConverter = new InstagramMediaFeedDataConverter();
-        userInfoDataConverter = new InstagramUserInfoDataConverter();
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      LOGGER.error("Exception while converting item: {}", ex.getMessage());
     }
 
-    @Override
-    public void cleanUp() {
-        //noop
+    if ( result != null ) {
+      return Lists.newArrayList(result);
+    } else {
+      return Lists.newArrayList();
     }
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    mediaFeedDataConverter = new InstagramMediaFeedDataConverter();
+    userInfoDataConverter = new InstagramUserInfoDataConverter();
+  }
+
+  @Override
+  public void cleanUp() {
+    //noop
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramAbstractProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramAbstractProvider.java b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramAbstractProvider.java
index 0c7ba95..025af18 100644
--- a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramAbstractProvider.java
+++ b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramAbstractProvider.java
@@ -12,12 +12,9 @@ software distributed under the License is distributed on an
 KIND, either express or implied. See the License for the
 specific language governing permissions and limitations
 under the License. */
+
 package org.apache.streams.instagram.provider;
 
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
@@ -28,6 +25,11 @@ import org.apache.streams.instagram.User;
 import org.apache.streams.instagram.UsersInfo;
 import org.apache.streams.util.ComponentUtils;
 import org.apache.streams.util.SerializationUtil;
+
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -53,178 +55,172 @@ import java.util.concurrent.atomic.AtomicBoolean;
  */
 public abstract class InstagramAbstractProvider implements StreamsProvider {
 
-    public static final String STREAMS_ID = "InstagramAbstractProvider";
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(InstagramAbstractProvider.class);
-
-    private static final int MAX_BATCH_SIZE = 2000;
-
-    protected InstagramConfiguration config;
-    protected Queue<StreamsDatum> dataQueue;
-    private ListeningExecutorService executorService;
-
-    private List<ListenableFuture<Object>> futures = new ArrayList<>();
-
-    private AtomicBoolean isCompleted;
-
-    public InstagramAbstractProvider() {
-        this.config = new ComponentConfigurator<>(InstagramConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("instagram"));
-    }
-
-    public InstagramAbstractProvider(InstagramConfiguration config) {
-        this.config = SerializationUtil.cloneBySerialization(config);
-    }
-
-    public static ExecutorService newFixedThreadPoolWithQueueSize(int nThreads, int queueSize) {
-        return new ThreadPoolExecutor(nThreads, nThreads,
-                5000L, TimeUnit.MILLISECONDS,
-                new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    @Override
-    public void startStream() {
-        InstagramDataCollector dataCollector = getInstagramDataCollector();
-        this.executorService = MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
-        ListenableFuture future = this.executorService.submit(dataCollector);
-        this.futures.add(future);
-        executorService.shutdown();
-    }
-
-    /**
-     * Return the data collector to use to connect to instagram.
-     * @return {@link InstagramDataCollector}
-     */
-    protected abstract InstagramDataCollector getInstagramDataCollector();
-
-
-    @Override
-    public StreamsResultSet readCurrent() {
-        Queue<StreamsDatum> batch = new ConcurrentLinkedQueue<>();
-        int count = 0;
-        while(!this.dataQueue.isEmpty() && count < MAX_BATCH_SIZE) {
-            ComponentUtils.offerUntilSuccess(ComponentUtils.pollWhileNotEmpty(this.dataQueue), batch);
-            ++count;
-        }
-        return new StreamsResultSet(batch);
-    }
-
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return null;
-    }
-
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return null;
-    }
-
-    @Override
-    public void prepare(Object configurationObject) {
-        this.dataQueue = new ConcurrentLinkedQueue<>();
-        this.isCompleted = new AtomicBoolean(false);
-    }
-
-    @Override
-    public void cleanUp() {
-        try {
-            ComponentUtils.shutdownExecutor(this.executorService, 5, 5);
-        } finally {
-            this.executorService = null;
-        }
-    }
-
-    /**
-     * Add default start and stop points if necessary.
-     */
-    private void updateUserInfoList() {
-        UsersInfo usersInfo = this.config.getUsersInfo();
-        if(usersInfo.getDefaultAfterDate() == null && usersInfo.getDefaultBeforeDate() == null) {
-            return;
-        }
-        DateTime defaultAfterDate = usersInfo.getDefaultAfterDate();
-        DateTime defaultBeforeDate = usersInfo.getDefaultBeforeDate();
-        for(User user : usersInfo.getUsers()) {
-            if(defaultAfterDate != null && user.getAfterDate() == null) {
-                user.setAfterDate(defaultAfterDate);
-            }
-            if(defaultBeforeDate != null && user.getBeforeDate() == null) {
-                user.setBeforeDate(defaultBeforeDate);
-            }
-        }
-    }
-
-    /**
-     * Overrides the client id in the configuration.
-     * @param clientId client id to use
-     */
-    public void setInstagramClientId(String clientId) {
-        this.config.setClientId(clientId);
-    }
-
-    /**
-     * Overrides authroized user tokens in the configuration.
-     * @param tokenStrings
-     */
-    public void setAuthorizedUserTokens(Collection<String> tokenStrings) {
-        ensureUsersInfo(this.config).setAuthorizedTokens(new HashSet<>(tokenStrings));
-    }
-
-    /**
-     * Overrides the default before date in the configuration
-     * @param beforeDate
-     */
-    public void setDefaultBeforeDate(DateTime beforeDate) {
-        ensureUsersInfo(this.config).setDefaultBeforeDate(beforeDate);
-    }
-
-    /**
-     * Overrides the default after date in the configuration
-     * @param afterDate
-     */
-    public void setDefaultAfterDate(DateTime afterDate) {
-        ensureUsersInfo(this.config).setDefaultAfterDate(afterDate);
-    }
-
-    /**
-     * Overrides the users in the configuration and sets the after date for each user. A NULL DateTime implies
-     * pull data from as early as possible.  If default before or after DateTimes are set, they will applied to all
-     * NULL DateTimes.
-     * @param usersWithAfterDate instagram user id mapped to BeforeDate time
-     */
-    public void setUsersWithAfterDate(Map<String, DateTime> usersWithAfterDate) {
-        Set<User> users = new HashSet<>();
-        for(String userId : usersWithAfterDate.keySet()) {
-            User user = new User();
-            user.setUserId(userId);
-            user.setAfterDate(usersWithAfterDate.get(userId));
-            users.add(user);
-        }
-        ensureUsersInfo(this.config).setUsers(users);
-    }
-
-    private UsersInfo ensureUsersInfo(InstagramConfiguration config) {
-        UsersInfo usersInfo = config.getUsersInfo();
-        if(usersInfo == null) {
-            usersInfo = new UsersInfo();
-            config.setUsersInfo(usersInfo);
-        }
-        return usersInfo;
-    }
-
-    @Override
-    public boolean isRunning() {
-        if (dataQueue.isEmpty() && executorService.isTerminated() && Futures.allAsList(futures).isDone()) {
-            LOGGER.info("Completed");
-            isCompleted.set(true);
-            LOGGER.info("Exiting");
-        }
-        return !isCompleted.get();
-    }
+  public static final String STREAMS_ID = "InstagramAbstractProvider";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramAbstractProvider.class);
+
+  private static final int MAX_BATCH_SIZE = 2000;
+
+  protected InstagramConfiguration config;
+  protected Queue<StreamsDatum> dataQueue;
+  private ListeningExecutorService executorService;
+
+  private List<ListenableFuture<Object>> futures = new ArrayList<>();
+
+  private AtomicBoolean isCompleted;
+
+  public InstagramAbstractProvider() {
+    this.config = new ComponentConfigurator<>(InstagramConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("instagram"));
+  }
+
+  public InstagramAbstractProvider(InstagramConfiguration config) {
+    this.config = SerializationUtil.cloneBySerialization(config);
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void startStream() {
+    InstagramDataCollector dataCollector = getInstagramDataCollector();
+    this.executorService = MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
+    ListenableFuture future = this.executorService.submit(dataCollector);
+    this.futures.add(future);
+    executorService.shutdown();
+  }
+
+  /**
+   * Return the data collector to use to connect to instagram.
+   * @return {@link InstagramDataCollector}
+   */
+  protected abstract InstagramDataCollector getInstagramDataCollector();
+
+
+  @Override
+  public StreamsResultSet readCurrent() {
+    Queue<StreamsDatum> batch = new ConcurrentLinkedQueue<>();
+    int count = 0;
+    while (!this.dataQueue.isEmpty() && count < MAX_BATCH_SIZE) {
+      ComponentUtils.offerUntilSuccess(ComponentUtils.pollWhileNotEmpty(this.dataQueue), batch);
+      ++count;
+    }
+    return new StreamsResultSet(batch);
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return null;
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return null;
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    this.dataQueue = new ConcurrentLinkedQueue<>();
+    this.isCompleted = new AtomicBoolean(false);
+  }
+
+  @Override
+  public void cleanUp() {
+    try {
+      ComponentUtils.shutdownExecutor(this.executorService, 5, 5);
+    } finally {
+      this.executorService = null;
+    }
+  }
+
+  /**
+   * Add default start and stop points if necessary.
+   */
+  private void updateUserInfoList() {
+    UsersInfo usersInfo = this.config.getUsersInfo();
+    if (usersInfo.getDefaultAfterDate() == null && usersInfo.getDefaultBeforeDate() == null) {
+      return;
+    }
+    DateTime defaultAfterDate = usersInfo.getDefaultAfterDate();
+    DateTime defaultBeforeDate = usersInfo.getDefaultBeforeDate();
+    for (User user : usersInfo.getUsers()) {
+      if (defaultAfterDate != null && user.getAfterDate() == null) {
+        user.setAfterDate(defaultAfterDate);
+      }
+      if (defaultBeforeDate != null && user.getBeforeDate() == null) {
+        user.setBeforeDate(defaultBeforeDate);
+      }
+    }
+  }
+
+  /**
+   * Overrides the client id in the configuration.
+   * @param clientId client id to use
+   */
+  public void setInstagramClientId(String clientId) {
+    this.config.setClientId(clientId);
+  }
+
+  /**
+   * Overrides authorized user tokens in the configuration.
+   * @param tokenStrings tokenStrings
+   */
+  public void setAuthorizedUserTokens(Collection<String> tokenStrings) {
+    ensureUsersInfo(this.config).setAuthorizedTokens(new HashSet<>(tokenStrings));
+  }
+
+  /**
+   * Overrides the default before date in the configuration.
+   * @param beforeDate beforeDate
+   */
+  public void setDefaultBeforeDate(DateTime beforeDate) {
+    ensureUsersInfo(this.config).setDefaultBeforeDate(beforeDate);
+  }
+
+  /**
+   * Overrides the default after date in the configuration.
+   * @param afterDate afterDate
+   */
+  public void setDefaultAfterDate(DateTime afterDate) {
+    ensureUsersInfo(this.config).setDefaultAfterDate(afterDate);
+  }
+
+  /**
+   * Overrides the users in the configuration and sets the after date for each user. A NULL DateTime implies
+   * pull data from as early as possible.  If default before or after DateTimes are set, they will be applied to all
+   * NULL DateTimes.
+   * @param usersWithAfterDate instagram user id mapped to BeforeDate time
+   */
+  public void setUsersWithAfterDate(Map<String, DateTime> usersWithAfterDate) {
+    Set<User> users = new HashSet<>();
+    for (String userId : usersWithAfterDate.keySet()) {
+      User user = new User();
+      user.setUserId(userId);
+      user.setAfterDate(usersWithAfterDate.get(userId));
+      users.add(user);
+    }
+    ensureUsersInfo(this.config).setUsers(users);
+  }
+
+  private UsersInfo ensureUsersInfo(InstagramConfiguration config) {
+    UsersInfo usersInfo = config.getUsersInfo();
+    if (usersInfo == null) {
+      usersInfo = new UsersInfo();
+      config.setUsersInfo(usersInfo);
+    }
+    return usersInfo;
+  }
+
+  @Override
+  public boolean isRunning() {
+    if (dataQueue.isEmpty() && executorService.isTerminated() && Futures.allAsList(futures).isDone()) {
+      LOGGER.info("Completed");
+      isCompleted.set(true);
+      LOGGER.info("Exiting");
+    }
+    return !isCompleted.get();
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramDataCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramDataCollector.java b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramDataCollector.java
index 97451f0..1916061 100644
--- a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramDataCollector.java
+++ b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramDataCollector.java
@@ -12,9 +12,9 @@ software distributed under the License is distributed on an
 KIND, either express or implied. See the License for the
 specific language governing permissions and limitations
 under the License. */
+
 package org.apache.streams.instagram.provider;
 
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.instagram.InstagramConfiguration;
 import org.apache.streams.instagram.User;
@@ -22,13 +22,9 @@ import org.apache.streams.util.ComponentUtils;
 import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
 import org.apache.streams.util.api.requests.backoff.impl.ExponentialBackOffStrategy;
 import org.apache.streams.util.oauth.tokens.tokenmanager.SimpleTokenManager;
-import org.apache.streams.util.oauth.tokens.tokenmanager.impl.BasicTokenManger;
+import org.apache.streams.util.oauth.tokens.tokenmanager.impl.BasicTokenManager;
+
 import org.jinstagram.Instagram;
-import org.jinstagram.entity.common.Pagination;
-import org.jinstagram.entity.users.feed.MediaFeed;
-import org.jinstagram.entity.users.feed.MediaFeedData;
-import org.jinstagram.exceptions.InstagramBadRequestException;
-import org.jinstagram.exceptions.InstagramRateLimitException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -44,102 +40,106 @@ import java.util.concurrent.atomic.AtomicBoolean;
  */
 public abstract class InstagramDataCollector<T> implements Runnable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(InstagramDataCollector.class);
-
-    protected Queue<StreamsDatum> dataQueue; //exposed for testing
-    private InstagramConfiguration config;
-    private AtomicBoolean isCompleted;
-    private SimpleTokenManager<InstagramOauthToken> tokenManger;
-    protected int consecutiveErrorCount;
-    protected BackOffStrategy backOffStrategy;
-    private Instagram instagram;
-
-
-    public InstagramDataCollector(Queue<StreamsDatum> queue, InstagramConfiguration config) {
-        this.dataQueue = queue;
-        this.config = config;
-        this.isCompleted = new AtomicBoolean(false);
-        this.tokenManger = new BasicTokenManger<InstagramOauthToken>();
-        for (String tokens : this.config.getUsersInfo().getAuthorizedTokens()) {
-            this.tokenManger.addTokenToPool(new InstagramOauthToken(tokens));
-        }
-        this.consecutiveErrorCount = 0;
-        this.backOffStrategy = new ExponentialBackOffStrategy(2);
-        this.instagram = new Instagram(this.config.getClientId());
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramDataCollector.class);
+
+  protected Queue<StreamsDatum> dataQueue; //exposed for testing
+  private InstagramConfiguration config;
+  private AtomicBoolean isCompleted;
+  private SimpleTokenManager<InstagramOauthToken> tokenManger;
+  protected int consecutiveErrorCount;
+  protected BackOffStrategy backOffStrategy;
+  private Instagram instagram;
+
+  /**
+   * InstagramDataCollector constructor.
+   * @param queue Queue of StreamsDatum
+   * @param config InstagramConfiguration
+   */
+  public InstagramDataCollector(Queue<StreamsDatum> queue, InstagramConfiguration config) {
+    this.dataQueue = queue;
+    this.config = config;
+    this.isCompleted = new AtomicBoolean(false);
+    this.tokenManger = new BasicTokenManager<InstagramOauthToken>();
+    for (String tokens : this.config.getUsersInfo().getAuthorizedTokens()) {
+      this.tokenManger.addTokenToPool(new InstagramOauthToken(tokens));
     }
-
-
-    /**
-     * If there are authorized tokens available, it sets a new token for the client and returns
-     * the client.  If there are no available tokens, it simply returns the client that was
-     * initialized in the constructor with client id.
-     * @return
-     */
-    protected Instagram getNextInstagramClient() {
-        if(this.tokenManger.numAvailableTokens() > 0) {
-            this.instagram.setAccessToken(this.tokenManger.getNextAvailableToken());
-        }
-        return this.instagram;
+    this.consecutiveErrorCount = 0;
+    this.backOffStrategy = new ExponentialBackOffStrategy(2);
+    this.instagram = new Instagram(this.config.getClientId());
+  }
+
+
+  /**
+   * If there are authorized tokens available, it sets a new token for the client and returns
+   * the client.  If there are no available tokens, it simply returns the client that was
+   * initialized in the constructor with client id.
+   * @return result
+   */
+  protected Instagram getNextInstagramClient() {
+    if (this.tokenManger.numAvailableTokens() > 0) {
+      this.instagram.setAccessToken(this.tokenManger.getNextAvailableToken());
     }
-
-    /**
-     * Return the number of available tokens for this data collector
-     * @return numbeer of available tokens
-     */
-    protected int numAvailableTokens() {
-        return this.tokenManger.numAvailableTokens();
+    return this.instagram;
+  }
+
+  /**
+   * Return the number of available tokens for this data collector.
+   * @return number of available tokens
+   */
+  protected int numAvailableTokens() {
+    return this.tokenManger.numAvailableTokens();
+  }
+
+  /**
+   * Queues the Instagram data to be output by the provider.
+   * @param userData data to queue
+   * @param userId user id who the data came from
+   */
+  protected void queueData(Collection<T> userData, String userId) {
+    if (userData == null) {
+      LOGGER.warn("User id, {}, returned a NULL data from instagram.", userId);
+    } else {
+      for (T data : userData) {
+        ComponentUtils.offerUntilSuccess(convertToStreamsDatum(data), this.dataQueue);
+      }
     }
-
-    /**
-     * Queues the Instagram data to be output by the provider.
-     * @param userData data to queue
-     * @param userId user id who the data came from
-     */
-    protected void queueData(Collection<T> userData, String userId) {
-        if (userData == null) {
-            LOGGER.warn("User id, {}, returned a NULL data from instagram.", userId);
-        } else {
-            for (T data : userData) {
-                ComponentUtils.offerUntilSuccess(convertToStreamsDatum(data), this.dataQueue);
-            }
-        }
+  }
+
+  /**
+   * @return true when the collector has queued all of the available Instagram data for the provided users.
+   */
+  public boolean isCompleted() {
+    return this.isCompleted.get();
+  }
+
+  @Override
+  public void run() {
+    for (User user : this.config.getUsersInfo().getUsers()) {
+      try {
+        collectInstagramDataForUser(user);
+      } catch (InterruptedException ie) {
+        Thread.currentThread().interrupt();
+      } catch (Exception ex) {
+        LOGGER.error("Exception thrown while polling for user, {}, skipping user.", user.getUserId());
+        LOGGER.error("Exception thrown while polling for user : ", ex);
+      }
     }
-
-    /**
-     * @return true when the collector has queued all of the available Instagram data for the provided users.
-     */
-    public boolean isCompleted() {
-        return this.isCompleted.get();
-    }
-
-    @Override
-    public void run() {
-        for (User user : this.config.getUsersInfo().getUsers()) {
-            try {
-                collectInstagramDataForUser(user);
-            } catch (InterruptedException ie) {
-                Thread.currentThread().interrupt();
-            } catch (Exception e) {
-                LOGGER.error("Exception thrown while polling for user, {}, skipping user.", user.getUserId());
-                LOGGER.error("Exception thrown while polling for user : ", e);
-            }
-        }
-        this.isCompleted.set(true);
-    }
-
-    /**
-     * Pull instagram data for a user and queues the resulting data.
-     * @param user
-     * @throws Exception
-     */
-    protected abstract void collectInstagramDataForUser(User user) throws Exception;
-
-    /**
-     * Takes an Instagram Object and sets it as the document of a streams datum and sets the id of the streams datum.
-     * @param item
-     * @return
-     */
-    protected abstract StreamsDatum convertToStreamsDatum(T item);
+    this.isCompleted.set(true);
+  }
+
+  /**
+   * Pull instagram data for a user and queues the resulting data.
+   * @param user
+   * @throws Exception
+   */
+  protected abstract void collectInstagramDataForUser(User user) throws Exception;
+
+  /**
+   * Takes an Instagram Object and sets it as the document of a streams datum and sets the id of the streams datum.
+   * @param item
+   * @return
+   */
+  protected abstract StreamsDatum convertToStreamsDatum(T item);
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramOauthToken.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramOauthToken.java b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramOauthToken.java
index 4531cfe..959b240 100644
--- a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramOauthToken.java
+++ b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/InstagramOauthToken.java
@@ -12,8 +12,8 @@ software distributed under the License is distributed on an
 KIND, either express or implied. See the License for the
 specific language governing permissions and limitations
 under the License. */
-package org.apache.streams.instagram.provider;
 
+package org.apache.streams.instagram.provider;
 
 import org.jinstagram.auth.model.Token;
 
@@ -23,21 +23,21 @@ import org.jinstagram.auth.model.Token;
  */
 public class InstagramOauthToken extends Token {
 
-    public InstagramOauthToken(String token) {
-        this(token, null);
-    }
+  public InstagramOauthToken(String token) {
+    this(token, null);
+  }
 
-    public InstagramOauthToken(String token, String secret) {
-        super(token, secret);
-    }
+  public InstagramOauthToken(String token, String secret) {
+    super(token, secret);
+  }
 
-    @Override
-    public boolean equals(Object o) {
-        if(!(o instanceof InstagramOauthToken)) {
-            return false;
-        }
-        InstagramOauthToken that = (InstagramOauthToken) o;
-        return this.getToken().equals(that.getToken());
+  @Override
+  public boolean equals(Object object) {
+    if (!(object instanceof InstagramOauthToken)) {
+      return false;
     }
+    InstagramOauthToken that = (InstagramOauthToken) object;
+    return this.getToken().equals(that.getToken());
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/recentmedia/InstagramRecentMediaCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/recentmedia/InstagramRecentMediaCollector.java b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/recentmedia/InstagramRecentMediaCollector.java
index e946e6b..b1e4593 100644
--- a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/recentmedia/InstagramRecentMediaCollector.java
+++ b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/recentmedia/InstagramRecentMediaCollector.java
@@ -12,13 +12,14 @@ software distributed under the License is distributed on an
 KIND, either express or implied. See the License for the
 specific language governing permissions and limitations
 under the License. */
+
 package org.apache.streams.instagram.provider.recentmedia;
 
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.instagram.InstagramConfiguration;
 import org.apache.streams.instagram.User;
 import org.apache.streams.instagram.provider.InstagramDataCollector;
+
 import org.jinstagram.entity.common.Pagination;
 import org.jinstagram.entity.users.feed.MediaFeed;
 import org.jinstagram.entity.users.feed.MediaFeedData;
@@ -37,75 +38,77 @@ import java.util.Queue;
  */
 public class InstagramRecentMediaCollector extends InstagramDataCollector<MediaFeedData> {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(InstagramRecentMediaCollector.class);
-    protected static final int MAX_ATTEMPTS = 5;
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramRecentMediaCollector.class);
+  protected static final int MAX_ATTEMPTS = 5;
 
-    private int consecutiveErrorCount;
+  private int consecutiveErrorCount;
 
 
-    public InstagramRecentMediaCollector(Queue<StreamsDatum> queue, InstagramConfiguration config) {
-        super(queue, config);
-    }
+  public InstagramRecentMediaCollector(Queue<StreamsDatum> queue, InstagramConfiguration config) {
+    super(queue, config);
+  }
 
-    @Override
-    protected StreamsDatum convertToStreamsDatum(MediaFeedData item) {
-        return new StreamsDatum(item, item.getId());
-    }
+  @Override
+  protected StreamsDatum convertToStreamsDatum(MediaFeedData item) {
+    return new StreamsDatum(item, item.getId());
+  }
 
-    /**
-     * Pull Recement Media for a user and queues the resulting data. Will try a single call 5 times before failing and
-     * moving on to the next call or returning.
-     * @param user
-     * @throws Exception
-     */
-    @Override
-    protected void collectInstagramDataForUser(User user) throws Exception {
-        Pagination pagination = null;
-        do {
-            int attempts = 0;
-            boolean succesfullDataPull = false;
-            while (!succesfullDataPull && attempts < MAX_ATTEMPTS) {
-                ++attempts;
-                MediaFeed feed = null;
-                try {
-                    if (pagination == null) {
-                        feed = getNextInstagramClient().getRecentMediaFeed(user.getUserId(),
-                                0,
-                                null,
-                                null,
-                                user.getBeforeDate() == null ? null : user.getBeforeDate().toDate(),
-                                user.getAfterDate() == null ? null : user.getAfterDate().toDate());
-                    } else {
-                        feed = getNextInstagramClient().getRecentMediaNextPage(pagination);
-                    }
-                } catch (Exception e) {
-                    if(e instanceof InstagramRateLimitException) {
-                        LOGGER.warn("Received rate limit exception from Instagram, backing off. : {}", e);
-                        this.backOffStrategy.backOff();
-                    } else if(e instanceof InstagramBadRequestException) {
-                        LOGGER.error("Received Bad Requests exception form Instagram: {}", e);
-                        attempts = MAX_ATTEMPTS; //don't repeat bad requests.
-                        ++this.consecutiveErrorCount;
-                    } else {
-                        LOGGER.error("Received Expection while attempting to poll Instagram: {}", e);
-                        ++this.consecutiveErrorCount;
-                    }
-                    if(this.consecutiveErrorCount > Math.max(this.numAvailableTokens(), MAX_ATTEMPTS*2)) {
-                        throw new Exception("InstagramCollector failed to successfully connect to instagram on "+this.consecutiveErrorCount+" attempts.");
-                    }
-                }
-                if(succesfullDataPull = feed != null) {
-                    this.consecutiveErrorCount = 0;
-                    this.backOffStrategy.reset();
-                    pagination = feed.getPagination();
-                    queueData(feed.getData(), user.getUserId());
-                }
-            }
-            if(!succesfullDataPull) {
-                LOGGER.error("Failed to get data from instagram for user id, {}, skipping user.", user.getUserId());
-            }
-        } while (pagination != null && pagination.hasNextPage());
+  /**
+   * Pull Recent Media for a user and queues the resulting data. Will try a single call 5 times before failing and
+   * moving on to the next call or returning.
+   * @param user user
+   * @throws Exception Exception
+   */
+  @Override
+  protected void collectInstagramDataForUser(User user) throws Exception {
+    Pagination pagination = null;
+    do {
+      int attempts = 0;
+      boolean succesfullDataPull = false;
+      while (!succesfullDataPull && attempts < MAX_ATTEMPTS) {
+        ++attempts;
+        MediaFeed feed = null;
+        try {
+          if (pagination == null) {
+            feed = getNextInstagramClient().getRecentMediaFeed(user.getUserId(),
+                0,
+                null,
+                null,
+                user.getBeforeDate() == null ? null : user.getBeforeDate().toDate(),
+                user.getAfterDate() == null ? null : user.getAfterDate().toDate());
+          } else {
+            feed = getNextInstagramClient().getRecentMediaNextPage(pagination);
+          }
+        } catch (Exception ex) {
+          if ( ex instanceof InstagramRateLimitException) {
+            LOGGER.warn("Received rate limit exception from Instagram, backing off. : {}", ex);
+            this.backOffStrategy.backOff();
+          } else if ( ex instanceof InstagramBadRequestException) {
+            LOGGER.error("Received Bad Requests exception form Instagram: {}", ex);
+            attempts = MAX_ATTEMPTS; //don't repeat bad requests.
+            ++this.consecutiveErrorCount;
+          } else {
+            LOGGER.error("Received Expection while attempting to poll Instagram: {}", ex);
+            ++this.consecutiveErrorCount;
+          }
+          if (this.consecutiveErrorCount > Math.max(this.numAvailableTokens(), MAX_ATTEMPTS * 2)) {
+            throw new Exception(
+                "InstagramCollector failed to successfully connect to instagram on " + this.consecutiveErrorCount + " attempts.");
+          }
+        }
+        if (succesfullDataPull = feed != null) {
+          this.consecutiveErrorCount = 0;
+          this.backOffStrategy.reset();
+          pagination = feed.getPagination();
+          queueData(feed.getData(), user.getUserId());
+        }
+      }
+      if (!succesfullDataPull) {
+        LOGGER.error("Failed to get data from instagram for user id, {}, skipping user.", user.getUserId());
+      }
     }
+    while (pagination != null && pagination.hasNextPage());
+  }
 
 
 }


[05/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/queues/ThroughputQueueMultiThreadTest.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/queues/ThroughputQueueMultiThreadTest.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/queues/ThroughputQueueMultiThreadTest.java
index 60df89c..ad4aa28 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/queues/ThroughputQueueMultiThreadTest.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/queues/ThroughputQueueMultiThreadTest.java
@@ -17,299 +17,304 @@
  */
 package org.apache.streams.local.queues;
 
+import org.apache.streams.util.ComponentUtils;
+
 import com.carrotsearch.randomizedtesting.RandomizedTest;
 import com.carrotsearch.randomizedtesting.annotations.Repeat;
-import org.apache.streams.util.ComponentUtils;
 import org.joda.time.DateTime;
 import org.junit.After;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.lang.management.ManagementFactory;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
 import javax.management.InstanceNotFoundException;
 import javax.management.ObjectName;
-import java.lang.management.ManagementFactory;
-import java.util.concurrent.*;
 
 /**
  * MultiThread unit tests for {@link org.apache.streams.local.queues.ThroughputQueue}
  */
 public class ThroughputQueueMultiThreadTest extends RandomizedTest {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(ThroughputQueueMultiThreadTest.class);
-    private static final String MBEAN_ID = "testQueue";
-    private static final String STREAM_ID = "test_stream";
-    private static long STREAM_START_TIME = (new DateTime()).getMillis();
-
-    /**
-     * Remove registered mbeans from previous tests
-     * @throws Exception
-     */
-    @After
-    public void unregisterMXBean() throws Exception {
-        try {
-            ManagementFactory.getPlatformMBeanServer().unregisterMBean(new ObjectName(String.format(ThroughputQueue.NAME_TEMPLATE, MBEAN_ID, STREAM_ID, STREAM_START_TIME)));
-        } catch (InstanceNotFoundException ife) {
-            //No-op
-        }
+  private static final Logger LOGGER = LoggerFactory.getLogger(ThroughputQueueMultiThreadTest.class);
+  private static final String MBEAN_ID = "testQueue";
+  private static final String STREAM_ID = "test_stream";
+  private static long STREAM_START_TIME = (new DateTime()).getMillis();
+
+  /**
+   * Remove registered mbeans from previous tests
+   * @throws Exception
+   */
+  @After
+  public void unregisterMXBean() throws Exception {
+    try {
+      ManagementFactory.getPlatformMBeanServer().unregisterMBean(new ObjectName(String.format(ThroughputQueue.NAME_TEMPLATE, MBEAN_ID, STREAM_ID, STREAM_START_TIME)));
+    } catch (InstanceNotFoundException ife) {
+      //No-op
     }
-
-    @After
-    public void removeLocalMBeans() {
-        try {
-            ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
-        } catch (Exception e) {
-            //No op.  proceed to next test
-        }
+  }
+
+  @After
+  public void removeLocalMBeans() {
+    try {
+      ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
+    } catch (Exception e) {
+      //No op.  proceed to next test
     }
-
-
-    /**
-     * Test that queue will block on puts when the queue is full
-     * @throws InterruptedException
-     */
-    @Test
-    public void testBlockOnFullQueue() throws InterruptedException {
-        int queueSize = randomIntBetween(1, 3000);
-        ExecutorService executor = Executors.newSingleThreadExecutor();
-        CountDownLatch full = new CountDownLatch(1);
-        CountDownLatch finished = new CountDownLatch(1);
-        ThroughputQueue queue = new ThroughputQueue(queueSize);
-        BlocksOnFullQueue testThread = new BlocksOnFullQueue(full, finished, queue, queueSize);
-        executor.submit(testThread);
-        full.await();
-        assertEquals(queueSize, queue.size());
-        assertEquals(queueSize, queue.getCurrentSize());
-        assertFalse(testThread.isComplete()); //test that it is blocked
-        safeSleep(1000);
-        assertFalse(testThread.isComplete()); //still blocked
-        queue.take();
-        finished.await();
-        assertEquals(queueSize, queue.size());
-        assertEquals(queueSize, queue.getCurrentSize());
-        assertTrue(testThread.isComplete());
-        executor.shutdownNow();
-        executor.awaitTermination(500, TimeUnit.MILLISECONDS);
+  }
+
+
+  /**
+   * Test that queue will block on puts when the queue is full
+   * @throws InterruptedException
+   */
+  @Test
+  public void testBlockOnFullQueue() throws InterruptedException {
+    int queueSize = randomIntBetween(1, 3000);
+    ExecutorService executor = Executors.newSingleThreadExecutor();
+    CountDownLatch full = new CountDownLatch(1);
+    CountDownLatch finished = new CountDownLatch(1);
+    ThroughputQueue queue = new ThroughputQueue(queueSize);
+    BlocksOnFullQueue testThread = new BlocksOnFullQueue(full, finished, queue, queueSize);
+    executor.submit(testThread);
+    full.await();
+    assertEquals(queueSize, queue.size());
+    assertEquals(queueSize, queue.getCurrentSize());
+    assertFalse(testThread.isComplete()); //test that it is blocked
+    safeSleep(1000);
+    assertFalse(testThread.isComplete()); //still blocked
+    queue.take();
+    finished.await();
+    assertEquals(queueSize, queue.size());
+    assertEquals(queueSize, queue.getCurrentSize());
+    assertTrue(testThread.isComplete());
+    executor.shutdownNow();
+    executor.awaitTermination(500, TimeUnit.MILLISECONDS);
+  }
+
+  /**
+   * Test that queue will block on Take when queue is empty
+   * @throws InterruptedException
+   */
+  @Test
+  public void testBlockOnEmptyQueue() throws InterruptedException {
+    int queueSize = randomIntBetween(1, 3000);
+    ExecutorService executor = Executors.newSingleThreadExecutor();
+    CountDownLatch empty = new CountDownLatch(1);
+    CountDownLatch finished = new CountDownLatch(1);
+    ThroughputQueue queue = new ThroughputQueue();
+    BlocksOnEmptyQueue testThread = new BlocksOnEmptyQueue(empty, finished, queueSize, queue);
+    for(int i=0; i < queueSize; ++i) {
+      queue.put(i);
     }
-
-    /**
-     * Test that queue will block on Take when queue is empty
-     * @throws InterruptedException
-     */
-    @Test
-    public void testBlockOnEmptyQueue() throws InterruptedException {
-        int queueSize = randomIntBetween(1, 3000);
-        ExecutorService executor = Executors.newSingleThreadExecutor();
-        CountDownLatch empty = new CountDownLatch(1);
-        CountDownLatch finished = new CountDownLatch(1);
-        ThroughputQueue queue = new ThroughputQueue();
-        BlocksOnEmptyQueue testThread = new BlocksOnEmptyQueue(empty, finished, queueSize, queue);
-        for(int i=0; i < queueSize; ++i) {
-            queue.put(i);
-        }
-        executor.submit(testThread);
-        empty.await();
-        assertEquals(0, queue.size());
-        assertEquals(0, queue.getCurrentSize());
-        assertFalse(testThread.isComplete());
-        safeSleep(1000);
-        assertFalse(testThread.isComplete());
-        queue.put(1);
-        finished.await();
-        assertEquals(0, queue.size());
-        assertEquals(0, queue.getCurrentSize());
-        assertTrue(testThread.isComplete());
-        executor.shutdownNow();
-        executor.awaitTermination(500, TimeUnit.MILLISECONDS);
+    executor.submit(testThread);
+    empty.await();
+    assertEquals(0, queue.size());
+    assertEquals(0, queue.getCurrentSize());
+    assertFalse(testThread.isComplete());
+    safeSleep(1000);
+    assertFalse(testThread.isComplete());
+    queue.put(1);
+    finished.await();
+    assertEquals(0, queue.size());
+    assertEquals(0, queue.getCurrentSize());
+    assertTrue(testThread.isComplete());
+    executor.shutdownNow();
+    executor.awaitTermination(500, TimeUnit.MILLISECONDS);
+  }
+
+
+  /**
+   * Test multiple threads putting and taking from the queue while
+   * this thread repeatedly calls the MXBean measurement methods.
+   * Should hammer the queue with request from multiple threads
+   * of all request types.  Purpose is to expose concurrent modification exceptions
+   * and/or deadlocks.
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testMultiThreadAccessAndInteruptResponse() throws Exception {
+    int putTakeThreadCount = randomIntBetween(1, 10);
+    int dataCount = randomIntBetween(1, 2000000);
+    int pollCount = randomIntBetween(1, 2000000);
+    int maxSize = randomIntBetween(1, 1000);
+    CountDownLatch finished = new CountDownLatch(putTakeThreadCount);
+    ThroughputQueue queue = new ThroughputQueue(maxSize, MBEAN_ID);
+    ExecutorService executor = Executors.newFixedThreadPool(putTakeThreadCount * 2);
+    for(int i=0; i < putTakeThreadCount; ++i) {
+      executor.submit(new PutData(finished, queue, dataCount));
+      executor.submit(new TakeData(queue));
     }
-
-
-    /**
-     * Test multiple threads putting and taking from the queue while
-     * this thread repeatedly calls the MXBean measurement methods.
-     * Should hammer the queue with request from multiple threads
-     * of all request types.  Purpose is to expose current modification exceptions
-     * and/or dead locks.
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testMultiThreadAccessAndInteruptResponse() throws Exception {
-        int putTakeThreadCount = randomIntBetween(1, 10);
-        int dataCount = randomIntBetween(1, 2000000);
-        int pollCount = randomIntBetween(1, 2000000);
-        int maxSize = randomIntBetween(1, 1000);
-        CountDownLatch finished = new CountDownLatch(putTakeThreadCount);
-        ThroughputQueue queue = new ThroughputQueue(maxSize, MBEAN_ID);
-        ExecutorService executor = Executors.newFixedThreadPool(putTakeThreadCount * 2);
-        for(int i=0; i < putTakeThreadCount; ++i) {
-            executor.submit(new PutData(finished, queue, dataCount));
-            executor.submit(new TakeData(queue));
-        }
-        for(int i=0; i < pollCount; ++i) {
-            queue.getAvgWait();
-            queue.getAdded();
-            queue.getCurrentSize();
-            queue.getMaxWait();
-            queue.getRemoved();
-            queue.getThroughput();
-        }
-        finished.await();
-        while(!queue.isEmpty()) {
-            LOGGER.info("Waiting for queue to be emptied...");
-            safeSleep(500);
-        }
-        long totalData = ((long) dataCount) * putTakeThreadCount;
-        assertEquals(totalData, queue.getAdded());
-        assertEquals(totalData, queue.getRemoved());
-        executor.shutdown();
-        executor.awaitTermination(1000, TimeUnit.MILLISECONDS); //shutdown puts
-        executor.shutdownNow();
-        executor.awaitTermination(1000, TimeUnit.MILLISECONDS); //shutdown takes
-        //Randomized should not report thread leak
+    for(int i=0; i < pollCount; ++i) {
+      queue.getAvgWait();
+      queue.getAdded();
+      queue.getCurrentSize();
+      queue.getMaxWait();
+      queue.getRemoved();
+      queue.getThroughput();
     }
-
-
-
-    private void safeSleep(long sleep) {
-        try {
-            Thread.sleep(sleep);
-        } catch (InterruptedException ie) {
-            Thread.currentThread().interrupt();
-        }
+    finished.await();
+    while(!queue.isEmpty()) {
+      LOGGER.info("Waiting for queue to be emptied...");
+      safeSleep(500);
     }
+    long totalData = ((long) dataCount) * putTakeThreadCount;
+    assertEquals(totalData, queue.getAdded());
+    assertEquals(totalData, queue.getRemoved());
+    executor.shutdown();
+    executor.awaitTermination(1000, TimeUnit.MILLISECONDS); //shutdown puts
+    executor.shutdownNow();
+    executor.awaitTermination(1000, TimeUnit.MILLISECONDS); //shutdown takes
+    //Randomized should not report thread leak
+  }
+
+
+
+  private void safeSleep(long sleep) {
+    try {
+      Thread.sleep(sleep);
+    } catch (InterruptedException ie) {
+      Thread.currentThread().interrupt();
+    }
+  }
 
 
 
 
-    /**
-     * Helper runnable for test {@link ThroughputQueueMultiThreadTest#testBlockOnFullQueue()}
-     */
-    private class BlocksOnFullQueue implements Runnable {
-
-        private CountDownLatch full;
-        volatile private boolean complete;
-        private int queueSize;
-        private CountDownLatch finished;
-        private BlockingQueue queue;
+  /**
+   * Helper runnable for test {@link ThroughputQueueMultiThreadTest#testBlockOnFullQueue()}
+   */
+  private class BlocksOnFullQueue implements Runnable {
 
-        public BlocksOnFullQueue(CountDownLatch latch, CountDownLatch finished, BlockingQueue queue, int queueSize) {
-            this.full = latch;
-            this.complete = false;
-            this.queueSize = queueSize;
-            this.finished = finished;
-            this.queue = queue;
-        }
+    private CountDownLatch full;
+    volatile private boolean complete;
+    private int queueSize;
+    private CountDownLatch finished;
+    private BlockingQueue queue;
 
-        @Override
-        public void run() {
-            try {
-                for (int i = 0; i < this.queueSize; ++i) {
-                    this.queue.put(i);
-                }
-                this.full.countDown();
-                this.queue.put(0);
-                this.complete = true;
-                this.finished.countDown();
-            } catch (InterruptedException ie) {
-                Thread.currentThread().interrupt();
-            }
-        }
+    public BlocksOnFullQueue(CountDownLatch latch, CountDownLatch finished, BlockingQueue queue, int queueSize) {
+      this.full = latch;
+      this.complete = false;
+      this.queueSize = queueSize;
+      this.finished = finished;
+      this.queue = queue;
+    }
 
-        public boolean isComplete() {
-            return this.complete;
+    @Override
+    public void run() {
+      try {
+        for (int i = 0; i < this.queueSize; ++i) {
+          this.queue.put(i);
         }
+        this.full.countDown();
+        this.queue.put(0);
+        this.complete = true;
+        this.finished.countDown();
+      } catch (InterruptedException ie) {
+        Thread.currentThread().interrupt();
+      }
     }
 
-
-    /**
-     * Helper runnable class for test {@link ThroughputQueueMultiThreadTest#testBlockOnEmptyQueue()}
-     */
-    private class BlocksOnEmptyQueue implements Runnable {
-
-        private CountDownLatch full;
-        volatile private boolean complete;
-        private int queueSize;
-        private CountDownLatch finished;
-        private BlockingQueue queue;
-
-        public BlocksOnEmptyQueue(CountDownLatch full, CountDownLatch finished, int queueSize, BlockingQueue queue) {
-            this.full = full;
-            this.finished = finished;
-            this.queueSize = queueSize;
-            this.queue = queue;
-            this.complete = false;
-        }
+    public boolean isComplete() {
+      return this.complete;
+    }
+  }
+
+
+  /**
+   * Helper runnable class for test {@link ThroughputQueueMultiThreadTest#testBlockOnEmptyQueue()}
+   */
+  private class BlocksOnEmptyQueue implements Runnable {
+
+    private CountDownLatch full;
+    volatile private boolean complete;
+    private int queueSize;
+    private CountDownLatch finished;
+    private BlockingQueue queue;
+
+    public BlocksOnEmptyQueue(CountDownLatch full, CountDownLatch finished, int queueSize, BlockingQueue queue) {
+      this.full = full;
+      this.finished = finished;
+      this.queueSize = queueSize;
+      this.queue = queue;
+      this.complete = false;
+    }
 
 
-        @Override
-        public void run() {
-            try {
-                for(int i=0; i < this.queueSize; ++i) {
-                    this.queue.take();
-                }
-                this.full.countDown();
-                this.queue.take();
-                this.complete = true;
-                this.finished.countDown();
-            } catch (InterruptedException ie) {
-                Thread.currentThread().interrupt();
-            }
+    @Override
+    public void run() {
+      try {
+        for(int i=0; i < this.queueSize; ++i) {
+          this.queue.take();
         }
+        this.full.countDown();
+        this.queue.take();
+        this.complete = true;
+        this.finished.countDown();
+      } catch (InterruptedException ie) {
+        Thread.currentThread().interrupt();
+      }
+    }
 
-        public boolean isComplete() {
-            return this.complete;
-        }
+    public boolean isComplete() {
+      return this.complete;
     }
+  }
 
 
-    private class PutData implements Runnable {
+  private class PutData implements Runnable {
 
-        private BlockingQueue queue;
-        private int dataCount;
-        private CountDownLatch finished;
+    private BlockingQueue queue;
+    private int dataCount;
+    private CountDownLatch finished;
 
-        public PutData(CountDownLatch finished, BlockingQueue queue, int dataCount) {
-            this.queue = queue;
-            this.dataCount = dataCount;
-            this.finished = finished;
-        }
+    public PutData(CountDownLatch finished, BlockingQueue queue, int dataCount) {
+      this.queue = queue;
+      this.dataCount = dataCount;
+      this.finished = finished;
+    }
 
 
-        @Override
-        public void run() {
-            try {
-                for(int i=0; i < this.dataCount; ++i) {
-                    this.queue.put(i);
-                }
-            } catch (InterruptedException ie) {
-                LOGGER.error("PUT DATA interupted !");
-                Thread.currentThread().interrupt();
-            }
-            this.finished.countDown();
+    @Override
+    public void run() {
+      try {
+        for(int i=0; i < this.dataCount; ++i) {
+          this.queue.put(i);
         }
+      } catch (InterruptedException ie) {
+        LOGGER.error("PUT DATA interupted !");
+        Thread.currentThread().interrupt();
+      }
+      this.finished.countDown();
     }
+  }
 
 
-    private class TakeData implements Runnable {
+  private class TakeData implements Runnable {
 
-        private BlockingQueue queue;
+    private BlockingQueue queue;
 
-        public TakeData(BlockingQueue queue) {
-            this.queue = queue;
-        }
+    public TakeData(BlockingQueue queue) {
+      this.queue = queue;
+    }
 
 
-        @Override
-        public void run() {
-            try {
-                while(true) {
-                    this.queue.take();
-                }
-            } catch (InterruptedException ie) {
-                LOGGER.error("PUT DATA interupted !");
-                Thread.currentThread().interrupt();
-            }
+    @Override
+    public void run() {
+      try {
+        while(true) {
+          this.queue.take();
         }
+      } catch (InterruptedException ie) {
+        LOGGER.error("PUT DATA interupted !");
+        Thread.currentThread().interrupt();
+      }
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/queues/ThroughputQueueSingleThreadTest.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/queues/ThroughputQueueSingleThreadTest.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/queues/ThroughputQueueSingleThreadTest.java
index 8c7f5c5..afe1911 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/queues/ThroughputQueueSingleThreadTest.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/queues/ThroughputQueueSingleThreadTest.java
@@ -17,234 +17,235 @@
  */
 package org.apache.streams.local.queues;
 
+import org.apache.streams.util.ComponentUtils;
+
 import com.carrotsearch.randomizedtesting.RandomizedTest;
 import com.carrotsearch.randomizedtesting.annotations.Repeat;
-import org.apache.streams.util.ComponentUtils;
 import org.joda.time.DateTime;
 import org.junit.After;
 import org.junit.Test;
 
+import java.lang.management.ManagementFactory;
 import javax.management.MBeanServer;
 import javax.management.ObjectInstance;
 import javax.management.ObjectName;
-import java.lang.management.ManagementFactory;
 
 /**
  * Single thread unit tests for {@link org.apache.streams.local.queues.ThroughputQueue}
  */
 public class ThroughputQueueSingleThreadTest extends RandomizedTest {
-    private static final String MBEAN_ID = "test_id";
-    private static final String STREAM_ID = "test_stream";
-    private static long STREAM_START_TIME = (new DateTime()).getMillis();
-
-    @After
-    public void removeLocalMBeans() {
-        try {
-            ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
-        } catch (Exception e) {
-            //No op.  proceed to next test
-        }
+  private static final String MBEAN_ID = "test_id";
+  private static final String STREAM_ID = "test_stream";
+  private static long STREAM_START_TIME = (new DateTime()).getMillis();
+
+  @After
+  public void removeLocalMBeans() {
+    try {
+      ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
+    } catch (Exception e) {
+      //No op.  proceed to next test
     }
-
-    /**
-     * Test that take and put queue and dequeue data as expected and all
-     * measurements form the queue are returning data.
-     * @throws Exception
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testTakeAndPut() throws Exception {
-        ThroughputQueue<Integer> queue = new ThroughputQueue<>();
-        int putCount = randomIntBetween(1, 1000);
-        for(int i=0; i < putCount; ++i) {
-            queue.put(i);
-            assertEquals(i+1, queue.size());
-            assertEquals(queue.size(), queue.getCurrentSize());
-        }
-        safeSleep(100); //ensure measurable wait time
-        int takeCount = randomIntBetween(1, putCount);
-        for(int i=0; i < takeCount; ++i) {
-            Integer element = queue.take();
-            assertNotNull(element);
-            assertEquals(i, element.intValue());
-            assertEquals(putCount - (1+i), queue.size());
-            assertEquals(queue.size(), queue.getCurrentSize());
-        }
-        assertEquals(putCount-takeCount, queue.size());
-        assertEquals(queue.size(), queue.getCurrentSize());
-        assertTrue(0.0 < queue.getMaxWait());
-        assertTrue(0.0 < queue.getAvgWait());
-        assertTrue(0.0 < queue.getThroughput());
-        assertEquals(putCount, queue.getAdded());
-        assertEquals(takeCount, queue.getRemoved());
+  }
+
+  /**
+   * Test that take and put queue and dequeue data as expected and all
+   * measurements from the queue are returning data.
+   * @throws Exception
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testTakeAndPut() throws Exception {
+    ThroughputQueue<Integer> queue = new ThroughputQueue<>();
+    int putCount = randomIntBetween(1, 1000);
+    for(int i=0; i < putCount; ++i) {
+      queue.put(i);
+      assertEquals(i+1, queue.size());
+      assertEquals(queue.size(), queue.getCurrentSize());
     }
-
-    /**
-     * Test that add and remove queue and dequeue data as expected
-     * and all measurements from the queue are returning data
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testAddAndRemove() {
-        ThroughputQueue<Integer> queue = new ThroughputQueue<>();
-        int putCount = randomIntBetween(1, 1000);
-        for(int i=0; i < putCount; ++i) {
-            queue.add(i);
-            assertEquals(i+1, queue.size());
-            assertEquals(queue.size(), queue.getCurrentSize());
-        }
-        safeSleep(100); //ensure measurable wait time
-        int takeCount = randomIntBetween(1, putCount);
-        for(int i=0; i < takeCount; ++i) {
-            Integer element = queue.remove();
-            assertNotNull(element);
-            assertEquals(i, element.intValue());
-            assertEquals(putCount - (1+i), queue.size());
-            assertEquals(queue.size(), queue.getCurrentSize());
-        }
-        assertEquals(putCount-takeCount, queue.size());
-        assertEquals(queue.size(), queue.getCurrentSize());
-        assertTrue(0.0 < queue.getMaxWait());
-        assertTrue(0.0 < queue.getAvgWait());
-        assertTrue(0.0 < queue.getThroughput());
-        assertEquals(putCount, queue.getAdded());
-        assertEquals(takeCount, queue.getRemoved());
+    safeSleep(100); //ensure measurable wait time
+    int takeCount = randomIntBetween(1, putCount);
+    for(int i=0; i < takeCount; ++i) {
+      Integer element = queue.take();
+      assertNotNull(element);
+      assertEquals(i, element.intValue());
+      assertEquals(putCount - (1+i), queue.size());
+      assertEquals(queue.size(), queue.getCurrentSize());
     }
-
-    /**
-     * Test that offer and poll queue and dequeue data as expected
-     * and all measurements from the queue are returning data
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testOfferAndPoll() {
-        ThroughputQueue<Integer> queue = new ThroughputQueue<>();
-        int putCount = randomIntBetween(1, 1000);
-        for(int i=0; i < putCount; ++i) {
-            queue.offer(i);
-            assertEquals(i+1, queue.size());
-            assertEquals(queue.size(), queue.getCurrentSize());
-        }
-        safeSleep(100); //ensure measurable wait time
-        int takeCount = randomIntBetween(1, putCount);
-        for(int i=0; i < takeCount; ++i) {
-            Integer element = queue.poll();
-            assertNotNull(element);
-            assertEquals(i, element.intValue());
-            assertEquals(putCount - (1+i), queue.size());
-            assertEquals(queue.size(), queue.getCurrentSize());
-        }
-        assertEquals(putCount-takeCount, queue.size());
-        assertEquals(queue.size(), queue.getCurrentSize());
-        assertTrue(0.0 < queue.getMaxWait());
-        assertTrue(0.0 < queue.getAvgWait());
-        assertTrue(0.0 < queue.getThroughput());
-        assertEquals(putCount, queue.getAdded());
-        assertEquals(takeCount, queue.getRemoved());
+    assertEquals(putCount-takeCount, queue.size());
+    assertEquals(queue.size(), queue.getCurrentSize());
+    assertTrue(0.0 < queue.getMaxWait());
+    assertTrue(0.0 < queue.getAvgWait());
+    assertTrue(0.0 < queue.getThroughput());
+    assertEquals(putCount, queue.getAdded());
+    assertEquals(takeCount, queue.getRemoved());
+  }
+
+  /**
+   * Test that add and remove queue and dequeue data as expected
+   * and all measurements from the queue are returning data
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testAddAndRemove() {
+    ThroughputQueue<Integer> queue = new ThroughputQueue<>();
+    int putCount = randomIntBetween(1, 1000);
+    for(int i=0; i < putCount; ++i) {
+      queue.add(i);
+      assertEquals(i+1, queue.size());
+      assertEquals(queue.size(), queue.getCurrentSize());
     }
-
-
-
-    /**
-     * Test that max wait and avg wait return expected values
-     * @throws Exception
-     */
-    @Test
-    public void testWait() throws Exception {
-        ThroughputQueue queue = new ThroughputQueue();
-        int wait = 1000;
-
-        for(int i=0; i < 3; ++i) {
-            queue.put(1);
-            safeSleep(wait);
-            queue.take();
-            assertTrue(queue.getMaxWait() >= wait && queue.getMaxWait() <= (wait * 2));//can't calculate exactly, making sure its close.
-            assertTrue(queue.getAvgWait() >= wait && queue.getAvgWait() <= (wait * 2));
-        }
-        queue.put(1);
-        queue.take();
-        assertTrue(queue.getMaxWait() >= wait && queue.getMaxWait() <= (wait * 2));//can't calculate exactly, making sure its close.
-        assertTrue(queue.getAvgWait() <= 5000 );
-        assertTrue(queue.getAvgWait() >= 500);
+    safeSleep(100); //ensure measurable wait time
+    int takeCount = randomIntBetween(1, putCount);
+    for(int i=0; i < takeCount; ++i) {
+      Integer element = queue.remove();
+      assertNotNull(element);
+      assertEquals(i, element.intValue());
+      assertEquals(putCount - (1+i), queue.size());
+      assertEquals(queue.size(), queue.getCurrentSize());
     }
-
-    /**
-     * Test that throughput returns expected values.
-     * @throws Exception
-     */
-    @Test
-    public void testThroughput() throws Exception {
-        ThroughputQueue queue = new ThroughputQueue();
-        int wait = 100;
-        for(int i=0; i < 10; ++i) {
-            queue.put(1);
-            safeSleep(wait);
-            queue.take();
-        }
-        double throughput = queue.getThroughput();
-        assertTrue(throughput <= 15 ); //can't calculate exactly, making sure its close.
-        assertTrue(throughput >= 5);
-
-        queue = new ThroughputQueue();
-        wait = 1000;
-        for(int i=0; i < 10; ++i) {
-            queue.put(1);
-        }
-        for(int i=0; i < 10; ++i) {
-            queue.take();
-        }
-        safeSleep(wait);
-        throughput = queue.getThroughput();
-        assertTrue(throughput <= 15 ); //can't calculate exactly, making sure its close.
-        assertTrue(throughput >= 5);
+    assertEquals(putCount-takeCount, queue.size());
+    assertEquals(queue.size(), queue.getCurrentSize());
+    assertTrue(0.0 < queue.getMaxWait());
+    assertTrue(0.0 < queue.getAvgWait());
+    assertTrue(0.0 < queue.getThroughput());
+    assertEquals(putCount, queue.getAdded());
+    assertEquals(takeCount, queue.getRemoved());
+  }
+
+  /**
+   * Test that offer and poll queue and dequeue data as expected
+   * and all measurements from the queue are returning data
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testOfferAndPoll() {
+    ThroughputQueue<Integer> queue = new ThroughputQueue<>();
+    int putCount = randomIntBetween(1, 1000);
+    for(int i=0; i < putCount; ++i) {
+      queue.offer(i);
+      assertEquals(i+1, queue.size());
+      assertEquals(queue.size(), queue.getCurrentSize());
     }
-
-
-    /**
-     * Test that the mbean registers
-     */
-    @Test
-    public void testMBeanRegistration() {
-        try {
-            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
-            Integer beanCount = mbs.getMBeanCount();
-            ThroughputQueue queue = new ThroughputQueue(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-            assertEquals("Expected bean to be registered", new Integer(beanCount+1), mbs.getMBeanCount());
-            ObjectInstance mBean = mbs.getObjectInstance(new ObjectName(String.format(ThroughputQueue.NAME_TEMPLATE, MBEAN_ID, STREAM_ID, STREAM_START_TIME)));
-            assertNotNull(mBean);
-        } catch (Exception e) {
-            fail("Failed to register MXBean : "+e.getMessage());
-        }
+    safeSleep(100); //ensure measurable wait time
+    int takeCount = randomIntBetween(1, putCount);
+    for(int i=0; i < takeCount; ++i) {
+      Integer element = queue.poll();
+      assertNotNull(element);
+      assertEquals(i, element.intValue());
+      assertEquals(putCount - (1+i), queue.size());
+      assertEquals(queue.size(), queue.getCurrentSize());
     }
-
-    /**
-     * Test that mulitple mbeans of the same type with a different name can be registered
-     */
-    @Test
-    public void testMultipleMBeanRegistrations() {
-        try {
-            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
-            Integer beanCount = mbs.getMBeanCount();
-            int numReg = randomIntBetween(2, 100);
-            for(int i=0; i < numReg; ++i) {
-                ThroughputQueue queue = new ThroughputQueue(MBEAN_ID + "" + i, STREAM_ID, STREAM_START_TIME);
-                assertEquals("Expected bean to be registered", new Integer(beanCount + (i+1)), mbs.getMBeanCount());
-                ObjectInstance mBean = mbs.getObjectInstance(new ObjectName(String.format(ThroughputQueue.NAME_TEMPLATE, MBEAN_ID + "" + i, STREAM_ID, STREAM_START_TIME)));
-                assertNotNull(mBean);
-            }
-        } catch (Exception e) {
-            fail("Failed to register MXBean : "+e.getMessage());
-        }
+    assertEquals(putCount-takeCount, queue.size());
+    assertEquals(queue.size(), queue.getCurrentSize());
+    assertTrue(0.0 < queue.getMaxWait());
+    assertTrue(0.0 < queue.getAvgWait());
+    assertTrue(0.0 < queue.getThroughput());
+    assertEquals(putCount, queue.getAdded());
+    assertEquals(takeCount, queue.getRemoved());
+  }
+
+
+
+  /**
+   * Test that max wait and avg wait return expected values
+   * @throws Exception
+   */
+  @Test
+  public void testWait() throws Exception {
+    ThroughputQueue queue = new ThroughputQueue();
+    int wait = 1000;
+
+    for(int i=0; i < 3; ++i) {
+      queue.put(1);
+      safeSleep(wait);
+      queue.take();
+      assertTrue(queue.getMaxWait() >= wait && queue.getMaxWait() <= (wait * 2));//can't calculate exactly, making sure it's close.
+      assertTrue(queue.getAvgWait() >= wait && queue.getAvgWait() <= (wait * 2));
+    }
+    queue.put(1);
+    queue.take();
+    assertTrue(queue.getMaxWait() >= wait && queue.getMaxWait() <= (wait * 2));//can't calculate exactly, making sure it's close.
+    assertTrue(queue.getAvgWait() <= 5000 );
+    assertTrue(queue.getAvgWait() >= 500);
+  }
+
+  /**
+   * Test that throughput returns expected values.
+   * @throws Exception
+   */
+  @Test
+  public void testThroughput() throws Exception {
+    ThroughputQueue queue = new ThroughputQueue();
+    int wait = 100;
+    for(int i=0; i < 10; ++i) {
+      queue.put(1);
+      safeSleep(wait);
+      queue.take();
+    }
+    double throughput = queue.getThroughput();
+    assertTrue(throughput <= 15 ); //can't calculate exactly, making sure it's close.
+    assertTrue(throughput >= 5);
+
+    queue = new ThroughputQueue();
+    wait = 1000;
+    for(int i=0; i < 10; ++i) {
+      queue.put(1);
+    }
+    for(int i=0; i < 10; ++i) {
+      queue.take();
+    }
+    safeSleep(wait);
+    throughput = queue.getThroughput();
+    assertTrue(throughput <= 15 ); //can't calculate exactly, making sure it's close.
+    assertTrue(throughput >= 5);
+  }
+
+
+  /**
+   * Test that the mbean registers
+   */
+  @Test
+  public void testMBeanRegistration() {
+    try {
+      MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+      Integer beanCount = mbs.getMBeanCount();
+      ThroughputQueue queue = new ThroughputQueue(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+      assertEquals("Expected bean to be registered", new Integer(beanCount+1), mbs.getMBeanCount());
+      ObjectInstance mBean = mbs.getObjectInstance(new ObjectName(String.format(ThroughputQueue.NAME_TEMPLATE, MBEAN_ID, STREAM_ID, STREAM_START_TIME)));
+      assertNotNull(mBean);
+    } catch (Exception e) {
+      fail("Failed to register MXBean : "+e.getMessage());
+    }
+  }
+
+  /**
+   * Test that multiple mbeans of the same type with a different name can be registered
+   */
+  @Test
+  public void testMultipleMBeanRegistrations() {
+    try {
+      MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+      Integer beanCount = mbs.getMBeanCount();
+      int numReg = randomIntBetween(2, 100);
+      for(int i=0; i < numReg; ++i) {
+        ThroughputQueue queue = new ThroughputQueue(MBEAN_ID + "" + i, STREAM_ID, STREAM_START_TIME);
+        assertEquals("Expected bean to be registered", new Integer(beanCount + (i+1)), mbs.getMBeanCount());
+        ObjectInstance mBean = mbs.getObjectInstance(new ObjectName(String.format(ThroughputQueue.NAME_TEMPLATE, MBEAN_ID + "" + i, STREAM_ID, STREAM_START_TIME)));
+        assertNotNull(mBean);
+      }
+    } catch (Exception e) {
+      fail("Failed to register MXBean : "+e.getMessage());
     }
+  }
 
 
-    private void safeSleep(long sleep) {
-        try {
-            Thread.sleep(sleep);
-        } catch (InterruptedException ie) {
-            Thread.currentThread().interrupt();
-        }
+  private void safeSleep(long sleep) {
+    try {
+      Thread.sleep(sleep);
+    } catch (InterruptedException ie) {
+      Thread.currentThread().interrupt();
     }
+  }
 
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/tasks/BasicTasksTest.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/tasks/BasicTasksTest.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/tasks/BasicTasksTest.java
index 38e948e..2a67550 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/tasks/BasicTasksTest.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/tasks/BasicTasksTest.java
@@ -18,25 +18,28 @@
 
 package org.apache.streams.local.tasks;
 
-import com.google.common.util.concurrent.Uninterruptibles;
 import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.local.counters.DatumStatusCounter;
 import org.apache.streams.local.counters.StreamsTaskCounter;
-import org.apache.streams.local.queues.ThroughputQueue;
 import org.apache.streams.local.test.processors.PassthroughDatumCounterProcessor;
 import org.apache.streams.local.test.providers.NumericMessageProvider;
 import org.apache.streams.local.test.writer.DatumCounterWriter;
 import org.apache.streams.util.ComponentUtils;
+
+import com.google.common.util.concurrent.Uninterruptibles;
 import org.junit.After;
 import org.junit.Test;
 
-import javax.management.InstanceNotFoundException;
-import javax.management.ObjectName;
-import java.lang.management.ManagementFactory;
-import java.util.Queue;
-import java.util.concurrent.*;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 /**
  *
@@ -44,264 +47,264 @@ import static org.junit.Assert.*;
 public class BasicTasksTest {
 
 
-    private static final String MBEAN_ID = "test_bean";
-    @After
-    public void removeLocalMBeans() {
-        try {
-            ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
-        } catch (Exception e) {
-            //No op.  proceed to next test
-        }
+  private static final String MBEAN_ID = "test_bean";
+  @After
+  public void removeLocalMBeans() {
+    try {
+      ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
+    } catch (Exception e) {
+      //No op.  proceed to next test
     }
+  }
 
-    @Test
-    public void testProviderTask() {
-        int numMessages = 100;
-        NumericMessageProvider provider = new NumericMessageProvider(numMessages);
-        StreamsProviderTask task = new StreamsProviderTask(provider, false, null);
-        BlockingQueue<StreamsDatum> outQueue = new LinkedBlockingQueue<>();
-        task.addOutputQueue(outQueue);
-        //Test that adding input queues to providers is not valid
-        BlockingQueue<StreamsDatum> inQueue = createInputQueue(numMessages);
-        Exception exp = null;
-        try {
-            task.addInputQueue(inQueue);
-        } catch (UnsupportedOperationException uoe) {
-            exp = uoe;
-        }
-        assertNotNull(exp);
+  @Test
+  public void testProviderTask() {
+    int numMessages = 100;
+    NumericMessageProvider provider = new NumericMessageProvider(numMessages);
+    StreamsProviderTask task = new StreamsProviderTask(provider, false, null);
+    BlockingQueue<StreamsDatum> outQueue = new LinkedBlockingQueue<>();
+    task.addOutputQueue(outQueue);
+    //Test that adding input queues to providers is not valid
+    BlockingQueue<StreamsDatum> inQueue = createInputQueue(numMessages);
+    Exception exp = null;
+    try {
+      task.addInputQueue(inQueue);
+    } catch (UnsupportedOperationException uoe) {
+      exp = uoe;
+    }
+    assertNotNull(exp);
 
-        ExecutorService service = Executors.newFixedThreadPool(1);
-        service.submit(task);
-        int attempts = 0;
-        while(outQueue.size() != numMessages) {
-            Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS);
-            if(attempts == 10) {
-                fail("Provider task failed to output "+numMessages+" in a timely fashion.");
-            }
-        }
-        service.shutdown();
-        try {
-            if(!service.awaitTermination(10, TimeUnit.SECONDS)){
-                service.shutdownNow();
-                fail("Service did not terminate.");
-            }
-            assertTrue("Task should have completed running in allotted time.", service.isTerminated());
-        } catch (InterruptedException e) {
-            Thread.currentThread().interrupt();
-        }
+    ExecutorService service = Executors.newFixedThreadPool(1);
+    service.submit(task);
+    int attempts = 0;
+    while(outQueue.size() != numMessages) {
+      Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS);
+      if(attempts == 10) {
+        fail("Provider task failed to output "+numMessages+" in a timely fashion.");
+      }
+    }
+    service.shutdown();
+    try {
+      if(!service.awaitTermination(10, TimeUnit.SECONDS)){
+        service.shutdownNow();
+        fail("Service did not terminate.");
+      }
+      assertTrue("Task should have completed running in allotted time.", service.isTerminated());
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
     }
+  }
 
-    @Test
-    public void testProcessorTask() {
-        int numMessages = 100;
-        PassthroughDatumCounterProcessor processor = new PassthroughDatumCounterProcessor("");
-        StreamsProcessorTask task = new StreamsProcessorTask(processor);
-        StreamsTaskCounter counter = new StreamsTaskCounter(MBEAN_ID, null, -1);
-        task.setStreamsTaskCounter(counter);
-        BlockingQueue<StreamsDatum> outQueue = new LinkedBlockingQueue<>();
-        BlockingQueue<StreamsDatum> inQueue = createInputQueue(numMessages);
-        task.addOutputQueue(outQueue);
-        task.addInputQueue(inQueue);
-        assertEquals(numMessages, task.getInputQueues().get(0).size());
-        ExecutorService service = Executors.newFixedThreadPool(1);
-        service.submit(task);
-        int attempts = 0;
-        while(inQueue.size() != 0 && outQueue.size() != numMessages) {
-            Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS);
-            ++attempts;
-            if(attempts == 10) {
-                fail("Processor task failed to output "+numMessages+" in a timely fashion.");
-            }
-        }
-        task.stopTask();;
-        service.shutdown();
-        try {
-            if(!service.awaitTermination(5, TimeUnit.SECONDS)){
-                service.shutdownNow();
-                fail("Service did not terminate.");
-            }
-            assertTrue("Task should have completed running in allotted time.", service.isTerminated());
-        } catch (InterruptedException e) {
-            fail("Test Interrupted.");
-        }
-        assertEquals(numMessages, processor.getMessageCount());
-        assertEquals(numMessages, counter.getNumReceived());
-        assertEquals(numMessages, counter.getNumEmitted());
-        assertEquals(0, counter.getNumUnhandledErrors());
-        assertEquals(0.0, counter.getErrorRate(), 0.0);
+  @Test
+  public void testProcessorTask() {
+    int numMessages = 100;
+    PassthroughDatumCounterProcessor processor = new PassthroughDatumCounterProcessor("");
+    StreamsProcessorTask task = new StreamsProcessorTask(processor);
+    StreamsTaskCounter counter = new StreamsTaskCounter(MBEAN_ID, null, -1);
+    task.setStreamsTaskCounter(counter);
+    BlockingQueue<StreamsDatum> outQueue = new LinkedBlockingQueue<>();
+    BlockingQueue<StreamsDatum> inQueue = createInputQueue(numMessages);
+    task.addOutputQueue(outQueue);
+    task.addInputQueue(inQueue);
+    assertEquals(numMessages, task.getInputQueues().get(0).size());
+    ExecutorService service = Executors.newFixedThreadPool(1);
+    service.submit(task);
+    int attempts = 0;
+    while(inQueue.size() != 0 && outQueue.size() != numMessages) {
+      Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS);
+      ++attempts;
+      if(attempts == 10) {
+        fail("Processor task failed to output "+numMessages+" in a timely fashion.");
+      }
+    }
+    task.stopTask();;
+    service.shutdown();
+    try {
+      if(!service.awaitTermination(5, TimeUnit.SECONDS)){
+        service.shutdownNow();
+        fail("Service did not terminate.");
+      }
+      assertTrue("Task should have completed running in allotted time.", service.isTerminated());
+    } catch (InterruptedException e) {
+      fail("Test Interrupted.");
     }
+    assertEquals(numMessages, processor.getMessageCount());
+    assertEquals(numMessages, counter.getNumReceived());
+    assertEquals(numMessages, counter.getNumEmitted());
+    assertEquals(0, counter.getNumUnhandledErrors());
+    assertEquals(0.0, counter.getErrorRate(), 0.0);
+  }
 
-    @Test
-    public void testWriterTask() {
-        int numMessages = 100;
-        DatumCounterWriter writer = new DatumCounterWriter("");
-        StreamsPersistWriterTask task = new StreamsPersistWriterTask(writer);
-        StreamsTaskCounter counter = new StreamsTaskCounter(MBEAN_ID, null, -1);
-        task.setStreamsTaskCounter(counter);
-        BlockingQueue<StreamsDatum> outQueue = new LinkedBlockingQueue<>();
-        BlockingQueue<StreamsDatum> inQueue = createInputQueue(numMessages);
+  @Test
+  public void testWriterTask() {
+    int numMessages = 100;
+    DatumCounterWriter writer = new DatumCounterWriter("");
+    StreamsPersistWriterTask task = new StreamsPersistWriterTask(writer);
+    StreamsTaskCounter counter = new StreamsTaskCounter(MBEAN_ID, null, -1);
+    task.setStreamsTaskCounter(counter);
+    BlockingQueue<StreamsDatum> outQueue = new LinkedBlockingQueue<>();
+    BlockingQueue<StreamsDatum> inQueue = createInputQueue(numMessages);
 
-        Exception exp = null;
-        try {
-            task.addOutputQueue(outQueue);
-        } catch (UnsupportedOperationException uoe) {
-            exp = uoe;
-        }
-        assertNotNull(exp);
-        task.addInputQueue(inQueue);
-        assertEquals(numMessages, task.getInputQueues().get(0).size());
-        ExecutorService service = Executors.newFixedThreadPool(1);
-        service.submit(task);
-        int attempts = 0;
-        while(inQueue.size() != 0 ) {
-            Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS);
-            ++attempts;
-            if(attempts == 10) {
-                fail("Processor task failed to output "+numMessages+" in a timely fashion.");
-            }
-        }
-        task.stopTask();
-        service.shutdown();
-        try {
-            if(!service.awaitTermination(15, TimeUnit.SECONDS)){
-                service.shutdownNow();
-                fail("Service did not terminate.");
-            }
-            assertTrue("Task should have completed running in allotted time.", service.isTerminated());
-        } catch (InterruptedException e) {
-            fail("Test Interrupted.");
-        }
-        assertEquals(numMessages, writer.getDatumsCounted());
-        assertEquals(numMessages, counter.getNumReceived());
-        assertEquals(0, counter.getNumEmitted());
-        assertEquals(0, counter.getNumUnhandledErrors());
-        assertEquals(0.0, counter.getErrorRate(), 0.0);
+    Exception exp = null;
+    try {
+      task.addOutputQueue(outQueue);
+    } catch (UnsupportedOperationException uoe) {
+      exp = uoe;
     }
+    assertNotNull(exp);
+    task.addInputQueue(inQueue);
+    assertEquals(numMessages, task.getInputQueues().get(0).size());
+    ExecutorService service = Executors.newFixedThreadPool(1);
+    service.submit(task);
+    int attempts = 0;
+    while(inQueue.size() != 0 ) {
+      Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS);
+      ++attempts;
+      if(attempts == 10) {
+        fail("Processor task failed to output "+numMessages+" in a timely fashion.");
+      }
+    }
+    task.stopTask();
+    service.shutdown();
+    try {
+      if(!service.awaitTermination(15, TimeUnit.SECONDS)){
+        service.shutdownNow();
+        fail("Service did not terminate.");
+      }
+      assertTrue("Task should have completed running in allotted time.", service.isTerminated());
+    } catch (InterruptedException e) {
+      fail("Test Interrupted.");
+    }
+    assertEquals(numMessages, writer.getDatumsCounted());
+    assertEquals(numMessages, counter.getNumReceived());
+    assertEquals(0, counter.getNumEmitted());
+    assertEquals(0, counter.getNumUnhandledErrors());
+    assertEquals(0.0, counter.getErrorRate(), 0.0);
+  }
 
-    @Test
-    public void testMergeTask() {
-        int numMessages = 100;
-        int incoming = 5;
-        StreamsMergeTask task = new StreamsMergeTask();
-        BlockingQueue<StreamsDatum> outQueue = new LinkedBlockingQueue<>();
-        task.addOutputQueue(outQueue);
-        for(int i=0; i < incoming; ++i) {
-            task.addInputQueue(createInputQueue(numMessages));
-        }
-        ExecutorService service = Executors.newFixedThreadPool(1);
-        service.submit(task);
-        int attempts = 0;
-        while(outQueue.size() != incoming * numMessages ) {
-            Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS);
-            ++attempts;
-            if(attempts == 10) {
-                assertEquals("Processor task failed to output " + (numMessages * incoming) + " in a timely fashion.", (numMessages * incoming), outQueue.size());
-            }
-        }
-        task.stopTask();
-        service.shutdown();
-        try {
-            if(!service.awaitTermination(5, TimeUnit.SECONDS)){
-                service.shutdownNow();
-                fail("Service did not terminate.");
-            }
-            assertTrue("Task should have completed running in allotted time.", service.isTerminated());
-        } catch (InterruptedException e) {
-            fail("Test Interrupted.");
-        }
+  @Test
+  public void testMergeTask() {
+    int numMessages = 100;
+    int incoming = 5;
+    StreamsMergeTask task = new StreamsMergeTask();
+    BlockingQueue<StreamsDatum> outQueue = new LinkedBlockingQueue<>();
+    task.addOutputQueue(outQueue);
+    for(int i=0; i < incoming; ++i) {
+      task.addInputQueue(createInputQueue(numMessages));
+    }
+    ExecutorService service = Executors.newFixedThreadPool(1);
+    service.submit(task);
+    int attempts = 0;
+    while(outQueue.size() != incoming * numMessages ) {
+      Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS);
+      ++attempts;
+      if(attempts == 10) {
+        assertEquals("Processor task failed to output " + (numMessages * incoming) + " in a timely fashion.", (numMessages * incoming), outQueue.size());
+      }
     }
+    task.stopTask();
+    service.shutdown();
+    try {
+      if(!service.awaitTermination(5, TimeUnit.SECONDS)){
+        service.shutdownNow();
+        fail("Service did not terminate.");
+      }
+      assertTrue("Task should have completed running in allotted time.", service.isTerminated());
+    } catch (InterruptedException e) {
+      fail("Test Interrupted.");
+    }
+  }
 
-    @Test
-    public void testBranching() {
-        int numMessages = 100;
-        PassthroughDatumCounterProcessor processor = new PassthroughDatumCounterProcessor("");
-        StreamsProcessorTask task = new StreamsProcessorTask(processor);
-        BlockingQueue<StreamsDatum> outQueue1 = new LinkedBlockingQueue<>();
-        BlockingQueue<StreamsDatum> outQueue2 = new LinkedBlockingQueue<>();
-        BlockingQueue<StreamsDatum> inQueue = createInputQueue(numMessages);
-        task.addOutputQueue(outQueue1);
-        task.addOutputQueue(outQueue2);
-        task.addInputQueue(inQueue);
-        assertEquals(numMessages, task.getInputQueues().get(0).size());
-        ExecutorService service = Executors.newFixedThreadPool(1);
-        service.submit(task);
-        int attempts = 0;
-        while(inQueue.size() != 0 ) {
-            Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS);
-            ++attempts;
-            if(attempts == 10) {
-                assertEquals("Processor task failed to output "+(numMessages)+" in a timely fashion.", 0, inQueue.size());
-            }
-        }
-        task.stopTask();
+  @Test
+  public void testBranching() {
+    int numMessages = 100;
+    PassthroughDatumCounterProcessor processor = new PassthroughDatumCounterProcessor("");
+    StreamsProcessorTask task = new StreamsProcessorTask(processor);
+    BlockingQueue<StreamsDatum> outQueue1 = new LinkedBlockingQueue<>();
+    BlockingQueue<StreamsDatum> outQueue2 = new LinkedBlockingQueue<>();
+    BlockingQueue<StreamsDatum> inQueue = createInputQueue(numMessages);
+    task.addOutputQueue(outQueue1);
+    task.addOutputQueue(outQueue2);
+    task.addInputQueue(inQueue);
+    assertEquals(numMessages, task.getInputQueues().get(0).size());
+    ExecutorService service = Executors.newFixedThreadPool(1);
+    service.submit(task);
+    int attempts = 0;
+    while(inQueue.size() != 0 ) {
+      Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS);
+      ++attempts;
+      if(attempts == 10) {
+        assertEquals("Processor task failed to output "+(numMessages)+" in a timely fashion.", 0, inQueue.size());
+      }
+    }
+    task.stopTask();
 
-        service.shutdown();
-        try {
-            if(!service.awaitTermination(5, TimeUnit.SECONDS)){
-                service.shutdownNow();
-                fail("Service did not terminate.");
-            }
-            assertTrue("Task should have completed running in allotted time.", service.isTerminated());
-        } catch (InterruptedException e) {
-            fail("Test Interrupted.");
-        }
-        assertEquals(numMessages, processor.getMessageCount());
-        assertEquals(numMessages, outQueue1.size());
-        assertEquals(numMessages, outQueue2.size());
+    service.shutdown();
+    try {
+      if(!service.awaitTermination(5, TimeUnit.SECONDS)){
+        service.shutdownNow();
+        fail("Service did not terminate.");
+      }
+      assertTrue("Task should have completed running in allotted time.", service.isTerminated());
+    } catch (InterruptedException e) {
+      fail("Test Interrupted.");
     }
+    assertEquals(numMessages, processor.getMessageCount());
+    assertEquals(numMessages, outQueue1.size());
+    assertEquals(numMessages, outQueue2.size());
+  }
 
-    @Test
-    public void testBranchingSerialization() {
-        int numMessages = 1;
-        PassthroughDatumCounterProcessor processor = new PassthroughDatumCounterProcessor("");
-        StreamsProcessorTask task = new StreamsProcessorTask(processor);
-        BlockingQueue<StreamsDatum> outQueue1 = new LinkedBlockingQueue<>();
-        BlockingQueue<StreamsDatum> outQueue2 = new LinkedBlockingQueue<>();
-        BlockingQueue<StreamsDatum> inQueue = createInputQueue(numMessages);
-        task.addOutputQueue(outQueue1);
-        task.addOutputQueue(outQueue2);
-        task.addInputQueue(inQueue);
-        ExecutorService service = Executors.newFixedThreadPool(1);
-        service.submit(task);
-        int attempts = 0;
-        while(inQueue.size() != 0 ) {
-            Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS);
-            ++attempts;
-            if(attempts == 10) {
-                assertEquals("Processor task failed to output "+(numMessages)+" in a timely fashion.", 0, inQueue.size());
-            }
-        }
-        task.stopTask();
+  @Test
+  public void testBranchingSerialization() {
+    int numMessages = 1;
+    PassthroughDatumCounterProcessor processor = new PassthroughDatumCounterProcessor("");
+    StreamsProcessorTask task = new StreamsProcessorTask(processor);
+    BlockingQueue<StreamsDatum> outQueue1 = new LinkedBlockingQueue<>();
+    BlockingQueue<StreamsDatum> outQueue2 = new LinkedBlockingQueue<>();
+    BlockingQueue<StreamsDatum> inQueue = createInputQueue(numMessages);
+    task.addOutputQueue(outQueue1);
+    task.addOutputQueue(outQueue2);
+    task.addInputQueue(inQueue);
+    ExecutorService service = Executors.newFixedThreadPool(1);
+    service.submit(task);
+    int attempts = 0;
+    while(inQueue.size() != 0 ) {
+      Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS);
+      ++attempts;
+      if(attempts == 10) {
+        assertEquals("Processor task failed to output "+(numMessages)+" in a timely fashion.", 0, inQueue.size());
+      }
+    }
+    task.stopTask();
 
-        service.shutdown();
-        try {
-            if(!service.awaitTermination(5, TimeUnit.SECONDS)){
-                service.shutdownNow();
-                fail("Service did not terminate.");
-            }
-            assertTrue("Task should have completed running in allotted time.", service.isTerminated());
-        } catch (InterruptedException e) {
-            fail("Test Interrupted.");
-        }
-        assertEquals(numMessages, processor.getMessageCount());
-        assertEquals(numMessages, outQueue1.size());
-        assertEquals(numMessages, outQueue2.size());
-        StreamsDatum datum1 = outQueue1.poll();
-        StreamsDatum datum2 = outQueue2.poll();
-        assertNotNull(datum1);
-        assertEquals(datum1, datum2);
-        datum1.setDocument("a");
-        assertNotEquals(datum1, datum2);
+    service.shutdown();
+    try {
+      if(!service.awaitTermination(5, TimeUnit.SECONDS)){
+        service.shutdownNow();
+        fail("Service did not terminate.");
+      }
+      assertTrue("Task should have completed running in allotted time.", service.isTerminated());
+    } catch (InterruptedException e) {
+      fail("Test Interrupted.");
     }
+    assertEquals(numMessages, processor.getMessageCount());
+    assertEquals(numMessages, outQueue1.size());
+    assertEquals(numMessages, outQueue2.size());
+    StreamsDatum datum1 = outQueue1.poll();
+    StreamsDatum datum2 = outQueue2.poll();
+    assertNotNull(datum1);
+    assertEquals(datum1, datum2);
+    datum1.setDocument("a");
+    assertNotEquals(datum1, datum2);
+  }
 
-    private BlockingQueue<StreamsDatum> createInputQueue(int numDatums) {
-        BlockingQueue<StreamsDatum> queue = new LinkedBlockingQueue<>();
-        for(int i=0; i < numDatums; ++i) {
-            queue.add(new StreamsDatum(i));
-        }
-        return queue;
+  private BlockingQueue<StreamsDatum> createInputQueue(int numDatums) {
+    BlockingQueue<StreamsDatum> queue = new LinkedBlockingQueue<>();
+    for(int i=0; i < numDatums; ++i) {
+      queue.add(new StreamsDatum(i));
     }
+    return queue;
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/tasks/StreamsProviderTaskTest.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/tasks/StreamsProviderTaskTest.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/tasks/StreamsProviderTaskTest.java
index 222566d..782e232 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/tasks/StreamsProviderTaskTest.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/tasks/StreamsProviderTaskTest.java
@@ -23,132 +23,142 @@ import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProvider;
 import org.apache.streams.core.StreamsResultSet;
 import org.apache.streams.util.ComponentUtils;
+
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
 import java.util.Queue;
-import java.util.concurrent.*;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingQueue;
 
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.is;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.atLeast;
+import static org.mockito.Mockito.atMost;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 /**
  * Tests the StreamsProviderTask.
  */
 public class StreamsProviderTaskTest {
 
-    protected StreamsProvider mockProvider;
-    protected ExecutorService pool;
-
-    @Before
-    public void setup() {
-        mockProvider = mock(StreamsProvider.class);
-        pool = Executors.newFixedThreadPool(1);
-    }
-
-    @After
-    public void removeLocalMBeans() {
-        try {
-            ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
-        } catch (Exception e) {
-            //No op.  proceed to next test
-        }
-    }
-
-    @Test
-    public void runPerpetual() {
-        StreamsProviderTask task = new StreamsProviderTask(mockProvider, true, null);
-        when(mockProvider.isRunning()).thenReturn(true);
-        when(mockProvider.readCurrent()).thenReturn(new StreamsResultSet(new LinkedBlockingQueue<StreamsDatum>()));
-        task.setTimeout(500);
-        task.setSleepTime(10);
-        task.run();
-        //Setting this to at least 2 means that it was correctly set to perpetual mode
-        verify(mockProvider, atLeast(2)).readCurrent();
-        verify(mockProvider, atMost(1)).prepare(null);
-    }
-
-    @Test
-    public void flushes() {
-        BlockingQueue<StreamsDatum> out = new LinkedBlockingQueue<>();
-        StreamsProviderTask task = new StreamsProviderTask(mockProvider, true, null);
-        when(mockProvider.isRunning()).thenReturn(true);
-        when(mockProvider.readCurrent()).thenReturn(new StreamsResultSet(getQueue(3)));
-        task.setTimeout(100);
-        task.setSleepTime(10);
-        task.getOutputQueues().add(out);
-        task.run();
-        assertThat(out.size(), is(equalTo(3)));
+  protected StreamsProvider mockProvider;
+  protected ExecutorService pool;
+
+  @Before
+  public void setup() {
+    mockProvider = mock(StreamsProvider.class);
+    pool = Executors.newFixedThreadPool(1);
+  }
+
+  @After
+  public void removeLocalMBeans() {
+    try {
+      ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
+    } catch (Exception e) {
+      //No op.  proceed to next test
     }
-
-    protected Queue<StreamsDatum> getQueue(int numElems) {
-        Queue<StreamsDatum> results = new LinkedBlockingQueue<>();
-        for(int i=0; i<numElems; i++) {
-            results.add(new StreamsDatum(Math.random()));
-        }
-        return results;
-    }
-
-    @Test
-    public void runNonPerpetual() {
-        StreamsProviderTask task = new StreamsProviderTask(mockProvider, false, null);
-        when(mockProvider.isRunning()).thenReturn(true);
-        when(mockProvider.readCurrent()).thenReturn(new StreamsResultSet(new LinkedBlockingQueue<StreamsDatum>()));
-        task.setTimeout(500);
-        task.setSleepTime(10);
-        task.run();
-        //In read current mode, this should only be called 1 time
-        verify(mockProvider, atLeast(1)).readCurrent();
-        verify(mockProvider, atMost(1)).prepare(null);
-    }
-
-    @Test
-    public void stoppable() throws InterruptedException {
-        StreamsProviderTask task = new StreamsProviderTask(mockProvider, true, null);
-        when(mockProvider.isRunning()).thenReturn(true);
-        when(mockProvider.readCurrent()).thenReturn(new StreamsResultSet(new LinkedBlockingQueue<StreamsDatum>()));
-        task.setTimeout(-1);
-        task.setSleepTime(10);
-        Future<?> taskResult = pool.submit(task);
-
-        //After a few milliseconds, tell the task that it is to stop and wait until it says it isn't or a timeout happens
-        int count = 0;
-        do {
-            Thread.sleep(100);
-            if(count == 0) {
-                task.stopTask();
-            }
-        } while(++count < 10 && !taskResult.isDone());
-        verifyNotRunning(task, taskResult);
-
+  }
+
+  @Test
+  public void runPerpetual() {
+    StreamsProviderTask task = new StreamsProviderTask(mockProvider, true, null);
+    when(mockProvider.isRunning()).thenReturn(true);
+    when(mockProvider.readCurrent()).thenReturn(new StreamsResultSet(new LinkedBlockingQueue<StreamsDatum>()));
+    task.setTimeout(500);
+    task.setSleepTime(10);
+    task.run();
+    //Setting this to at least 2 means that it was correctly set to perpetual mode
+    verify(mockProvider, atLeast(2)).readCurrent();
+    verify(mockProvider, atMost(1)).prepare(null);
+  }
+
+  @Test
+  public void flushes() {
+    BlockingQueue<StreamsDatum> out = new LinkedBlockingQueue<>();
+    StreamsProviderTask task = new StreamsProviderTask(mockProvider, true, null);
+    when(mockProvider.isRunning()).thenReturn(true);
+    when(mockProvider.readCurrent()).thenReturn(new StreamsResultSet(getQueue(3)));
+    task.setTimeout(100);
+    task.setSleepTime(10);
+    task.getOutputQueues().add(out);
+    task.run();
+    assertThat(out.size(), is(equalTo(3)));
+  }
+
+  protected Queue<StreamsDatum> getQueue(int numElems) {
+    Queue<StreamsDatum> results = new LinkedBlockingQueue<>();
+    for(int i=0; i<numElems; i++) {
+      results.add(new StreamsDatum(Math.random()));
     }
-
-    @Test
-    public void earlyException() throws InterruptedException {
-        StreamsProviderTask task = new StreamsProviderTask(mockProvider, true, null);
-        when(mockProvider.isRunning()).thenReturn(true);
-        doThrow(new RuntimeException()).when(mockProvider).prepare(null);
-        task.setTimeout(-1);
-        task.setSleepTime(10);
-        Future<?> taskResult = pool.submit(task);
-        int count = 0;
-        while(++count < 10 && !taskResult.isDone()) {
-            Thread.sleep(100);
-        }
-        verifyNotRunning(task, taskResult);
+    return results;
+  }
+
+  @Test
+  public void runNonPerpetual() {
+    StreamsProviderTask task = new StreamsProviderTask(mockProvider, false, null);
+    when(mockProvider.isRunning()).thenReturn(true);
+    when(mockProvider.readCurrent()).thenReturn(new StreamsResultSet(new LinkedBlockingQueue<StreamsDatum>()));
+    task.setTimeout(500);
+    task.setSleepTime(10);
+    task.run();
+    //In read current mode, this should only be called 1 time
+    verify(mockProvider, atLeast(1)).readCurrent();
+    verify(mockProvider, atMost(1)).prepare(null);
+  }
+
+  @Test
+  public void stoppable() throws InterruptedException {
+    StreamsProviderTask task = new StreamsProviderTask(mockProvider, true, null);
+    when(mockProvider.isRunning()).thenReturn(true);
+    when(mockProvider.readCurrent()).thenReturn(new StreamsResultSet(new LinkedBlockingQueue<StreamsDatum>()));
+    task.setTimeout(-1);
+    task.setSleepTime(10);
+    Future<?> taskResult = pool.submit(task);
+
+    //After a few milliseconds, tell the task that it is to stop and wait until it says it isn't or a timeout happens
+    int count = 0;
+    do {
+      Thread.sleep(100);
+      if(count == 0) {
+        task.stopTask();
+      }
+    } while(++count < 10 && !taskResult.isDone());
+    verifyNotRunning(task, taskResult);
+
+  }
+
+  @Test
+  public void earlyException() throws InterruptedException {
+    StreamsProviderTask task = new StreamsProviderTask(mockProvider, true, null);
+    when(mockProvider.isRunning()).thenReturn(true);
+    doThrow(new RuntimeException()).when(mockProvider).prepare(null);
+    task.setTimeout(-1);
+    task.setSleepTime(10);
+    Future<?> taskResult = pool.submit(task);
+    int count = 0;
+    while(++count < 10 && !taskResult.isDone()) {
+      Thread.sleep(100);
     }
-
-    protected void verifyNotRunning(StreamsProviderTask task, Future<?> taskResult) {
-        //Make sure the task is reporting that it is complete and that the run method returned
-        if(taskResult.isDone()) {
-            assertThat(task.isRunning(), is(false));
-        } else {
-            ComponentUtils.shutdownExecutor(pool, 0, 10);
-            fail();
-        }
+    verifyNotRunning(task, taskResult);
+  }
+
+  protected void verifyNotRunning(StreamsProviderTask task, Future<?> taskResult) {
+    //Make sure the task is reporting that it is complete and that the run method returned
+    if(taskResult.isDone()) {
+      assertThat(task.isRunning(), is(false));
+    } else {
+      ComponentUtils.shutdownExecutor(pool, 0, 10);
+      fail();
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/DoNothingProcessor.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/DoNothingProcessor.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/DoNothingProcessor.java
index cad7873..31a83ec 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/DoNothingProcessor.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/DoNothingProcessor.java
@@ -20,6 +20,7 @@ package org.apache.streams.local.test.processors;
 
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -31,34 +32,34 @@ import java.util.List;
  */
 public class DoNothingProcessor implements StreamsProcessor {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(DoNothingProcessor.class);
+  private final static Logger LOGGER = LoggerFactory.getLogger(DoNothingProcessor.class);
 
-    public final static String STREAMS_ID = "DoNothingProcessor";
+  public final static String STREAMS_ID = "DoNothingProcessor";
 
-    List<StreamsDatum> result;
+  List<StreamsDatum> result;
 
-    public DoNothingProcessor() {
-    }
+  public DoNothingProcessor() {
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        this.result = new LinkedList<StreamsDatum>();
-        result.add(entry);
-        return result;
-    }
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    this.result = new LinkedList<StreamsDatum>();
+    result.add(entry);
+    return result;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public void prepare(Object configurationObject) {
 
-    }
+  }
 
-    @Override
-    public void cleanUp() {
-        LOGGER.debug("Processor clean up!");
-    }
+  @Override
+  public void cleanUp() {
+    LOGGER.debug("Processor clean up!");
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/PassthroughDatumCounterProcessor.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/PassthroughDatumCounterProcessor.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/PassthroughDatumCounterProcessor.java
index 970a8dc..43343e5 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/PassthroughDatumCounterProcessor.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/PassthroughDatumCounterProcessor.java
@@ -20,10 +20,15 @@ package org.apache.streams.local.test.processors;
 
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Random;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -32,76 +37,76 @@ import java.util.concurrent.atomic.AtomicLong;
  */
 public class PassthroughDatumCounterProcessor implements StreamsProcessor {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(PassthroughDatumCounterProcessor.class);
+  private final static Logger LOGGER = LoggerFactory.getLogger(PassthroughDatumCounterProcessor.class);
 
-    public final static String STREAMS_ID = "PassthroughDatumCounterProcessor";
+  public final static String STREAMS_ID = "PassthroughDatumCounterProcessor";
 
-    /**
-     * Set of all ids that have been claimed.  Ensures all instances are assigned unique ids
-     */
-    public static Set<Integer> CLAIMED_ID = new HashSet<Integer>();
-    /**
-     * Random instance to generate ids
-     */
-    public static final Random RAND = new Random();
-    /**
-     * Set of instance ids that received data. Usefully for testing parrallelization is actually working.
-     */
-    public final static Set<Integer> SEEN_DATA = new HashSet<Integer>();
-    /**
-     * The total count of data seen by a all instances of a processor.
-     */
-    public static final ConcurrentHashMap<String, AtomicLong> COUNTS = new ConcurrentHashMap<>();
+  /**
+   * Set of all ids that have been claimed.  Ensures all instances are assigned unique ids
+   */
+  public static Set<Integer> CLAIMED_ID = new HashSet<Integer>();
+  /**
+   * Random instance to generate ids
+   */
+  public static final Random RAND = new Random();
+  /**
+   * Set of instance ids that received data. Useful for testing that parallelization is actually working.
+   */
+  public final static Set<Integer> SEEN_DATA = new HashSet<Integer>();
+  /**
+   * The total count of data seen by all instances of a processor.
+   */
+  public static final ConcurrentHashMap<String, AtomicLong> COUNTS = new ConcurrentHashMap<>();
 
-    private int count = 0;
-    private int id;
-    private String procId;
+  private int count = 0;
+  private int id;
+  private String procId;
 
-    public PassthroughDatumCounterProcessor(String procId) {
-        this.procId = procId;
-    }
+  public PassthroughDatumCounterProcessor(String procId) {
+    this.procId = procId;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        ++this.count;
-        List<StreamsDatum> result = new LinkedList<StreamsDatum>();
-        result.add(entry);
-        synchronized (SEEN_DATA) {
-            SEEN_DATA.add(this.id);
-        }
-        return result;
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    ++this.count;
+    List<StreamsDatum> result = new LinkedList<StreamsDatum>();
+    result.add(entry);
+    synchronized (SEEN_DATA) {
+      SEEN_DATA.add(this.id);
     }
+    return result;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        synchronized (CLAIMED_ID) {
-            this.id = RAND.nextInt();
-            while(!CLAIMED_ID.add(this.id)) {
-                this.id = RAND.nextInt();
-            }
-        }
+  @Override
+  public void prepare(Object configurationObject) {
+    synchronized (CLAIMED_ID) {
+      this.id = RAND.nextInt();
+      while(!CLAIMED_ID.add(this.id)) {
+        this.id = RAND.nextInt();
+      }
     }
+  }
 
-    @Override
-    public void cleanUp() {
-        LOGGER.debug("Clean up {}", this.procId);
-        synchronized (COUNTS) {
-            AtomicLong count = COUNTS.get(this.procId);
-            if(count == null) {
-                COUNTS.put(this.procId, new AtomicLong(this.count));
-            } else {
-                count.addAndGet(this.count);
-            }
-        }
-        LOGGER.debug("{}\t{}", this.procId, this.count);
+  @Override
+  public void cleanUp() {
+    LOGGER.debug("Clean up {}", this.procId);
+    synchronized (COUNTS) {
+      AtomicLong count = COUNTS.get(this.procId);
+      if(count == null) {
+        COUNTS.put(this.procId, new AtomicLong(this.count));
+      } else {
+        count.addAndGet(this.count);
+      }
     }
+    LOGGER.debug("{}\t{}", this.procId, this.count);
+  }
 
-    public int getMessageCount() {
-        return this.count;
-    }
+  public int getMessageCount() {
+    return this.count;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/SlowProcessor.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/SlowProcessor.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/SlowProcessor.java
index 2b172cd..227e0f8 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/SlowProcessor.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/processors/SlowProcessor.java
@@ -19,40 +19,41 @@
 
 package org.apache.streams.local.test.processors;
 
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 
+import com.google.common.collect.Lists;
+
 import java.util.List;
 
 /**
  */
 public class SlowProcessor  implements StreamsProcessor {
 
-    public final static String STREAMS_ID = "DoNothingProcessor";
+  public final static String STREAMS_ID = "DoNothingProcessor";
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        try {
-            Thread.sleep(1000);
-        } catch (InterruptedException e) {
-            Thread.currentThread().interrupt();
-        }
-        return Lists.newArrayList(entry);
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    try {
+      Thread.sleep(1000);
+    } catch (InterruptedException e) {
+      Thread.currentThread().interrupt();
     }
+    return Lists.newArrayList(entry);
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public void prepare(Object configurationObject) {
 
-    }
+  }
 
-    @Override
-    public void cleanUp() {
+  @Override
+  public void cleanUp() {
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/providers/EmptyResultSetProvider.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/providers/EmptyResultSetProvider.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/providers/EmptyResultSetProvider.java
index bdbc9ec..571c0fc 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/providers/EmptyResultSetProvider.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/providers/EmptyResultSetProvider.java
@@ -19,10 +19,11 @@
 
 package org.apache.streams.local.test.providers;
 
-import com.google.common.collect.Queues;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProvider;
 import org.apache.streams.core.StreamsResultSet;
+
+import com.google.common.collect.Queues;
 import org.joda.time.DateTime;
 
 import java.math.BigInteger;
@@ -32,43 +33,43 @@ import java.math.BigInteger;
  */
 public class EmptyResultSetProvider implements StreamsProvider {
 
-    @Override
-    public String getId() {
-        return "EmptyResultSetProvider";
-    }
+  @Override
+  public String getId() {
+    return "EmptyResultSetProvider";
+  }
 
-    @Override
-    public void startStream() {
-        //NOP
-    }
+  @Override
+  public void startStream() {
+    //NOP
+  }
 
-    @Override
-    public StreamsResultSet readCurrent() {
-        return new StreamsResultSet(Queues.<StreamsDatum>newLinkedBlockingQueue());
-    }
+  @Override
+  public StreamsResultSet readCurrent() {
+    return new StreamsResultSet(Queues.<StreamsDatum>newLinkedBlockingQueue());
+  }
 
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return new StreamsResultSet(Queues.<StreamsDatum>newLinkedBlockingQueue());
-    }
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return new StreamsResultSet(Queues.<StreamsDatum>newLinkedBlockingQueue());
+  }
 
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return new StreamsResultSet(Queues.<StreamsDatum>newLinkedBlockingQueue());
-    }
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return new StreamsResultSet(Queues.<StreamsDatum>newLinkedBlockingQueue());
+  }
 
-    @Override
-    public boolean isRunning() {
-        return true;
-    }
+  @Override
+  public boolean isRunning() {
+    return true;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        //NOP
-    }
+  @Override
+  public void prepare(Object configurationObject) {
+    //NOP
+  }
 
-    @Override
-    public void cleanUp() {
-        //NOP
-    }
+  @Override
+  public void cleanUp() {
+    //NOP
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/providers/NumericMessageProvider.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/providers/NumericMessageProvider.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/providers/NumericMessageProvider.java
index d7c1568..88494a8 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/providers/NumericMessageProvider.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/test/providers/NumericMessageProvider.java
@@ -18,93 +18,91 @@
 
 package org.apache.streams.local.test.providers;
 
-import com.google.common.collect.Queues;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProvider;
 import org.apache.streams.core.StreamsResultSet;
+
+import com.google.common.collect.Queues;
 import org.joda.time.DateTime;
 
 import java.math.BigInteger;
-import java.util.Iterator;
 import java.util.Queue;
 import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.ConcurrentLinkedQueue;
-import java.util.concurrent.atomic.AtomicBoolean;
 
 /**
  * Test StreamsProvider that sends out StreamsDatums numbered from 0 to numMessages.
  */
 public class NumericMessageProvider implements StreamsProvider {
 
-    @Override
-    public String getId() {
-        return "NumericMessageProvider";
-    }
-
-    private static final int DEFAULT_BATCH_SIZE = 100;
-
-    private int numMessages;
-    private BlockingQueue<StreamsDatum> data;
-    private volatile boolean complete = false;
-
-    public NumericMessageProvider(int numMessages) {
-        this.numMessages = numMessages;
-    }
-
-    @Override
-    public void startStream() {
-        this.data = constructQueue();
+  @Override
+  public String getId() {
+    return "NumericMessageProvider";
+  }
+
+  private static final int DEFAULT_BATCH_SIZE = 100;
+
+  private int numMessages;
+  private BlockingQueue<StreamsDatum> data;
+  private volatile boolean complete = false;
+
+  public NumericMessageProvider(int numMessages) {
+    this.numMessages = numMessages;
+  }
+
+  @Override
+  public void startStream() {
+    this.data = constructQueue();
+  }
+
+  @Override
+  public StreamsResultSet readCurrent() {
+    int batchSize = 0;
+    Queue<StreamsDatum> batch = Queues.newLinkedBlockingQueue();
+    try {
+      while (!this.data.isEmpty() && batchSize < DEFAULT_BATCH_SIZE) {
+        batch.add(this.data.take());
+        ++batchSize;
+      }
+    } catch (InterruptedException ie) {
+      Thread.currentThread().interrupt();
     }
-
-    @Override
-    public StreamsResultSet readCurrent() {
-        int batchSize = 0;
-        Queue<StreamsDatum> batch = Queues.newLinkedBlockingQueue();
-        try {
-            while (!this.data.isEmpty() && batchSize < DEFAULT_BATCH_SIZE) {
-                batch.add(this.data.take());
-                ++batchSize;
-            }
-        } catch (InterruptedException ie) {
-            Thread.currentThread().interrupt();
-        }
 //        System.out.println("******************\n**\tBatchSize="+batch.size()+"\n******************");
-        this.complete = batch.isEmpty() && this.data.isEmpty();
-        return new StreamsResultSet(batch);
-    }
-
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return new StreamsResultSet(constructQueue());
-    }
-
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return new StreamsResultSet(constructQueue());
-    }
-
-    @Override
-    public boolean isRunning() {
-        return !this.complete;
-    }
-
-    @Override
-    public void prepare(Object configurationObject) {
-        this.data = constructQueue();
-    }
-
-    @Override
-    public void cleanUp() {
-
-    }
-
-    private BlockingQueue<StreamsDatum> constructQueue() {
-        BlockingQueue<StreamsDatum> datums = Queues.newArrayBlockingQueue(numMessages);
-        for(int i=0;i<numMessages;i++) {
-            datums.add(new StreamsDatum(i));
-        }
-        return datums;
+    this.complete = batch.isEmpty() && this.data.isEmpty();
+    return new StreamsResultSet(batch);
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return new StreamsResultSet(constructQueue());
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return new StreamsResultSet(constructQueue());
+  }
+
+  @Override
+  public boolean isRunning() {
+    return !this.complete;
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    this.data = constructQueue();
+  }
+
+  @Override
+  public void cleanUp() {
+
+  }
+
+  private BlockingQueue<StreamsDatum> constructQueue() {
+    BlockingQueue<StreamsDatum> datums = Queues.newArrayBlockingQueue(numMessages);
+    for(int i=0;i<numMessages;i++) {
+      datums.add(new StreamsDatum(i));
     }
+    return datums;
+  }
 }
 
 



[16/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/serializer/YoutubeEventClassifierTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/serializer/YoutubeEventClassifierTest.java b/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/serializer/YoutubeEventClassifierTest.java
index 57f9be5..16565bb 100644
--- a/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/serializer/YoutubeEventClassifierTest.java
+++ b/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/serializer/YoutubeEventClassifierTest.java
@@ -18,32 +18,33 @@
 
 package com.youtube.serializer;
 
-import com.google.api.services.youtube.model.Video;
 import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.api.services.youtube.model.Video;
 import org.junit.Test;
 
-
 import static org.junit.Assert.assertEquals;
 
 public class YoutubeEventClassifierTest {
-    private final String testVideo = "{\"etag\":\"\\\"4FSIjSQU83ZJMYAO0IqRYMvZX98/V0q3OIauZ3ZAkszLUDbHL45yEGM\\\"\",\"id\":\"sUOepRctwVE\",\"kind\":\"youtube#video\",\"snippet\":{\"channelId\":\"UCNENOn2nmwguQYkejKhJGPQ\",\"channelTitle\":\"Carilion Clinic\",\"description\":\"Join Carilion Clinic's Heart Failure experts for a LIVE Google+ Hangout on Feb. 23, 12:30-1 p.m. to learn more about heart failure, treatment options, and lifestyle changes. Learn more: https://plus.google.com/u/0/events/cj074q9r6csgv6i2kqhi2isc6k0\",\"publishedAt\":{\"value\":1422977409000,\"dateOnly\":false,\"timeZoneShift\":-360},\"thumbnails\":{\"default\":{\"height\":480,\"url\":\"https://i.ytimg.com/vi/sUOepRctwVE/sddefault.jpg\",\"width\":640}},\"title\":\"Be Heart Smart: Congestive Heart Failure LIVE Event\"},\"statistics\":{\"commentCount\":1,\"dislikeCount\":0,\"favoriteCount\":0,\"likeCount\":0,\"viewCount\":9}}";
-    private final String testObjectNode = "{\"etag\":\"\\\"4FSIjSQU83ZJMYAO0IqRYMvZX98/V0q3OIauZ3ZAkszLUDbHL45yEGM\\\"\",\"id\":\"sUOepRctwVE\",\"kind\":\"youtube#somethingElse\",\"snippet\":{\"channelId\":\"UCNENOn2nmwguQYkejKhJGPQ\",\"channelTitle\":\"Carilion Clinic\",\"description\":\"Join Carilion Clinic's Heart Failure experts for a LIVE Google+ Hangout on Feb. 23, 12:30-1 p.m. to learn more about heart failure, treatment options, and lifestyle changes. Learn more: https://plus.google.com/u/0/events/cj074q9r6csgv6i2kqhi2isc6k0\",\"publishedAt\":{\"value\":1422977409000,\"dateOnly\":false,\"timeZoneShift\":-360},\"thumbnails\":{\"default\":{\"height\":480,\"url\":\"https://i.ytimg.com/vi/sUOepRctwVE/sddefault.jpg\",\"width\":640}},\"title\":\"Be Heart Smart: Congestive Heart Failure LIVE Event\"},\"statistics\":{\"commentCount\":1,\"dislikeCount\":0,\"favoriteCount\":0,\"likeCount\":0,\"viewCount\":9}}";
-    @Test
-    public void testVideoClassification() {
-        Class klass = YoutubeEventClassifier.detectClass(testVideo);
-
-        assertEquals(klass, Video.class);
-    }
-
-    @Test(expected=IllegalArgumentException.class)
-    public void testExceptionClassification() {
-        YoutubeEventClassifier.detectClass("");
-    }
-
-    @Test
-    public void testObjectNodeClassification() {
-        Class klass = YoutubeEventClassifier.detectClass(testObjectNode);
-
-        assertEquals(klass, ObjectNode.class);
-    }
+
+  private final String testVideo = "{\"etag\":\"\\\"4FSIjSQU83ZJMYAO0IqRYMvZX98/V0q3OIauZ3ZAkszLUDbHL45yEGM\\\"\",\"id\":\"sUOepRctwVE\",\"kind\":\"youtube#video\",\"snippet\":{\"channelId\":\"UCNENOn2nmwguQYkejKhJGPQ\",\"channelTitle\":\"Carilion Clinic\",\"description\":\"Join Carilion Clinic's Heart Failure experts for a LIVE Google+ Hangout on Feb. 23, 12:30-1 p.m. to learn more about heart failure, treatment options, and lifestyle changes. Learn more: https://plus.google.com/u/0/events/cj074q9r6csgv6i2kqhi2isc6k0\",\"publishedAt\":{\"value\":1422977409000,\"dateOnly\":false,\"timeZoneShift\":-360},\"thumbnails\":{\"default\":{\"height\":480,\"url\":\"https://i.ytimg.com/vi/sUOepRctwVE/sddefault.jpg\",\"width\":640}},\"title\":\"Be Heart Smart: Congestive Heart Failure LIVE Event\"},\"statistics\":{\"commentCount\":1,\"dislikeCount\":0,\"favoriteCount\":0,\"likeCount\":0,\"viewCount\":9}}";
+  private final String testObjectNode = "{\"etag\":\"\\\"4FSIjSQU83ZJMYAO0IqRYMvZX98/V0q3OIauZ3ZAkszLUDbHL45yEGM\\\"\",\"id\":\"sUOepRctwVE\",\"kind\":\"youtube#somethingElse\",\"snippet\":{\"channelId\":\"UCNENOn2nmwguQYkejKhJGPQ\",\"channelTitle\":\"Carilion Clinic\",\"description\":\"Join Carilion Clinic's Heart Failure experts for a LIVE Google+ Hangout on Feb. 23, 12:30-1 p.m. to learn more about heart failure, treatment options, and lifestyle changes. Learn more: https://plus.google.com/u/0/events/cj074q9r6csgv6i2kqhi2isc6k0\",\"publishedAt\":{\"value\":1422977409000,\"dateOnly\":false,\"timeZoneShift\":-360},\"thumbnails\":{\"default\":{\"height\":480,\"url\":\"https://i.ytimg.com/vi/sUOepRctwVE/sddefault.jpg\",\"width\":640}},\"title\":\"Be Heart Smart: Congestive Heart Failure LIVE Event\"},\"statistics\":{\"commentCount\":1,\"dislikeCount\":0,\"favoriteCount\":0,\"likeCount\":0,\"viewCount\":9}}";
+
+  @Test
+  public void testVideoClassification() {
+    Class klass = YoutubeEventClassifier.detectClass(testVideo);
+
+    assertEquals(klass, Video.class);
+  }
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testExceptionClassification() {
+    YoutubeEventClassifier.detectClass("");
+  }
+
+  @Test
+  public void testObjectNodeClassification() {
+    Class klass = YoutubeEventClassifier.detectClass(testObjectNode);
+
+    assertEquals(klass, ObjectNode.class);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/serializer/YoutubeVideoSerDeTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/serializer/YoutubeVideoSerDeTest.java b/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/serializer/YoutubeVideoSerDeTest.java
index c162f41..29afd19 100644
--- a/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/serializer/YoutubeVideoSerDeTest.java
+++ b/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/serializer/YoutubeVideoSerDeTest.java
@@ -15,17 +15,19 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package com.youtube.serializer;
 
-import com.fasterxml.jackson.databind.DeserializationFeature;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.module.SimpleModule;
-import com.google.api.services.youtube.model.Video;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.extensions.ExtensionUtil;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Provider;
+
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+import com.google.api.services.youtube.model.Video;
 import org.joda.time.DateTime;
 import org.junit.Before;
 import org.junit.Test;
@@ -38,75 +40,84 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
+/**
+ * Test for YoutubeVideoSerDe.
+ */
 public class YoutubeVideoSerDeTest {
-    private final static Logger LOGGER = LoggerFactory.getLogger(YoutubeVideoSerDeTest.class);
-    private final String testVideo = "{\"etag\":\"\\\"4FSIjSQU83ZJMYAO0IqRYMvZX98/V0q3OIauZ3ZAkszLUDbHL45yEGM\\\"\",\"id\":\"sUOepRctwVE\",\"kind\":\"youtube#video\",\"snippet\":{\"channelId\":\"UCNENOn2nmwguQYkejKhJGPQ\",\"channelTitle\":\"Carilion Clinic\",\"description\":\"Join Carilion Clinic's Heart Failure experts for a LIVE Google+ Hangout on Feb. 23, 12:30-1 p.m. to learn more about heart failure, treatment options, and lifestyle changes. Learn more: https://plus.google.com/u/0/events/cj074q9r6csgv6i2kqhi2isc6k0\",\"publishedAt\":{\"value\":1422977409000,\"dateOnly\":false,\"timeZoneShift\":-360},\"thumbnails\":{\"default\":{\"height\":480,\"url\":\"https://i.ytimg.com/vi/sUOepRctwVE/sddefault.jpg\",\"width\":640}},\"title\":\"Be Heart Smart: Congestive Heart Failure LIVE Event\"},\"statistics\":{\"commentCount\":1,\"dislikeCount\":0,\"favoriteCount\":0,\"likeCount\":0,\"viewCount\":9}}";
-    private ObjectMapper objectMapper;
-    private YoutubeActivityUtil youtubeActivityUtil;
-
-    @Before
-    public void setup() {
-        objectMapper = StreamsJacksonMapper.getInstance();
-        SimpleModule simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(Video.class, new YoutubeVideoDeserializer());
-        objectMapper.registerModule(simpleModule);
-        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-
-        youtubeActivityUtil = new YoutubeActivityUtil();
-    }
 
-    @Test
-    public void testVideoObject() {
-        LOGGER.info("raw: {}", testVideo);
+  private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeVideoSerDeTest.class);
+  private final String testVideo = "{\"etag\":\"\\\"4FSIjSQU83ZJMYAO0IqRYMvZX98/V0q3OIauZ3ZAkszLUDbHL45yEGM\\\"\",\"id\":\"sUOepRctwVE\",\"kind\":\"youtube#video\",\"snippet\":{\"channelId\":\"UCNENOn2nmwguQYkejKhJGPQ\",\"channelTitle\":\"Carilion Clinic\",\"description\":\"Join Carilion Clinic's Heart Failure experts for a LIVE Google+ Hangout on Feb. 23, 12:30-1 p.m. to learn more about heart failure, treatment options, and lifestyle changes. Learn more: https://plus.google.com/u/0/events/cj074q9r6csgv6i2kqhi2isc6k0\",\"publishedAt\":{\"value\":1422977409000,\"dateOnly\":false,\"timeZoneShift\":-360},\"thumbnails\":{\"default\":{\"height\":480,\"url\":\"https://i.ytimg.com/vi/sUOepRctwVE/sddefault.jpg\",\"width\":640}},\"title\":\"Be Heart Smart: Congestive Heart Failure LIVE Event\"},\"statistics\":{\"commentCount\":1,\"dislikeCount\":0,\"favoriteCount\":0,\"likeCount\":0,\"viewCount\":9}}";
+  private ObjectMapper objectMapper;
+  private YoutubeActivityUtil youtubeActivityUtil;
 
-        try {
-            Activity activity = new Activity();
+  /**
+   * setup for test.
+   */
+  @Before
+  public void setup() {
+    objectMapper = StreamsJacksonMapper.getInstance();
+    SimpleModule simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(Video.class, new YoutubeVideoDeserializer());
+    objectMapper.registerModule(simpleModule);
+    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
 
-            Video video = objectMapper.readValue(testVideo, Video.class);
+    youtubeActivityUtil = new YoutubeActivityUtil();
+  }
 
-            youtubeActivityUtil.updateActivity(video, activity, "testChannelId");
-            LOGGER.info("activity: {}", activity);
+  @Test
+  public void testVideoObject() {
+    LOGGER.info("raw: {}", testVideo);
 
-            assertNotNull(activity);
-            assert (activity.getId().contains("id:youtube:post"));
-            assertEquals(activity.getVerb(), "post");
+    try {
+      Activity activity = new Activity();
 
-            Provider provider = activity.getProvider();
-            assertEquals(provider.getId(), "id:providers:youtube");
-            assertEquals(provider.getDisplayName(), "YouTube");
+      Video video = objectMapper.readValue(testVideo, Video.class);
 
-            ActivityObject actor = activity.getActor();
-            assert (actor.getId().contains("id:youtube:"));
-            assertNotNull(actor.getDisplayName());
-            assertNotNull(actor.getSummary());
+      youtubeActivityUtil.updateActivity(video, activity, "testChannelId");
+      LOGGER.info("activity: {}", activity);
 
-            assertNotNull(activity.getTitle());
-            assertNotNull(activity.getUrl());
-            assertNotNull(activity.getContent());
+      assertNotNull(activity);
+      assert (activity.getId().contains("id:youtube:post"));
+      assertEquals(activity.getVerb(), "post");
 
-            assertEquals(activity.getPublished().getClass(), DateTime.class);
+      Provider provider = activity.getProvider();
+      assertEquals(provider.getId(), "id:providers:youtube");
+      assertEquals(provider.getDisplayName(), "YouTube");
 
-            Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
+      ActivityObject actor = activity.getActor();
+      assert (actor.getId().contains("id:youtube:"));
+      assertNotNull(actor.getDisplayName());
+      assertNotNull(actor.getSummary());
 
-            assertNotNull(extensions.get("youtube"));
-            assertNotNull(extensions.get("likes"));
+      assertNotNull(activity.getTitle());
+      assertNotNull(activity.getUrl());
+      assertNotNull(activity.getContent());
 
-            assertTrue(testActivityObject(activity));
-        } catch (Exception e) {
-            LOGGER.error("Exception while testing the Ser/De functionality of the Video deserializer: {}", e);
-        }
-    }
+      assertEquals(activity.getPublished().getClass(), DateTime.class);
 
-    private boolean testActivityObject(Activity activity) {
-        boolean valid = false;
+      Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
 
-        ActivityObject obj = activity.getObject();
+      assertNotNull(extensions.get("youtube"));
+      assertNotNull(extensions.get("likes"));
 
-        if(obj.getObjectType().equals("video") && !obj.getImage().equals(null) &&
-                !obj.getUrl().equals("null") && obj.getUrl().contains("https://www.youtube.com/watch?v=")) {
-            valid = true;
-        }
+      assertTrue(testActivityObject(activity));
+    } catch (Exception ex) {
+      LOGGER.error("Exception while testing the Ser/De functionality of the Video deserializer: {}", ex);
+    }
+  }
+
+  private boolean testActivityObject(Activity activity) {
+    boolean valid = false;
 
-        return valid;
+    ActivityObject obj = activity.getObject();
+
+    if ( obj.getObjectType().equals("video")
+        && !obj.getImage().equals(null)
+        && !obj.getUrl().equals("null")
+        && obj.getUrl().contains("https://www.youtube.com/watch?v=")) {
+      valid = true;
     }
+
+    return valid;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/test/java/org/apache/streams/youtube/test/providers/YoutubeChannelProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/test/java/org/apache/streams/youtube/test/providers/YoutubeChannelProviderIT.java b/streams-contrib/streams-provider-youtube/src/test/java/org/apache/streams/youtube/test/providers/YoutubeChannelProviderIT.java
index 1f53df8..2e143de 100644
--- a/streams-contrib/streams-provider-youtube/src/test/java/org/apache/streams/youtube/test/providers/YoutubeChannelProviderIT.java
+++ b/streams-contrib/streams-provider-youtube/src/test/java/org/apache/streams/youtube/test/providers/YoutubeChannelProviderIT.java
@@ -28,43 +28,43 @@ import java.io.FileReader;
 import java.io.LineNumberReader;
 
 /**
- * Created by sblackmon on 10/13/16.
+ * YoutubeChannelProviderIT integration test for YoutubeChannelProvider.
  */
 public class YoutubeChannelProviderIT {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeChannelProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeChannelProviderIT.class);
 
-    @Test
-    public void testYoutubeChannelProvider() throws Exception {
+  @Test
+  public void testYoutubeChannelProvider() throws Exception {
 
-        String configfile = "./target/test-classes/YoutubeChannelProviderIT.conf";
-        String outfile = "./target/test-classes/YoutubeChannelProviderIT.stdout.txt";
+    String configfile = "./target/test-classes/YoutubeChannelProviderIT.conf";
+    String outfile = "./target/test-classes/YoutubeChannelProviderIT.stdout.txt";
 
-        String[] args = new String[2];
-        args[0] = configfile;
-        args[1] = outfile;
+    String[] args = new String[2];
+    args[0] = configfile;
+    args[1] = outfile;
 
-        Thread testThread = new Thread((Runnable) () -> {
-            try {
-                YoutubeChannelProvider.main(args);
-            } catch( Exception e ) {
-                LOGGER.error("Test Exception!", e);
-            }
-        });
-        testThread.start();
-        testThread.join(60000);
+    Thread testThread = new Thread((Runnable) () -> {
+      try {
+        YoutubeChannelProvider.main(args);
+      } catch ( Exception ex ) {
+        LOGGER.error("Test Exception!", ex);
+      }
+    });
+    testThread.start();
+    testThread.join(60000);
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while (outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() >= 1);
+    assert (outCounter.getLineNumber() >= 1);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/test/java/org/apache/streams/youtube/test/providers/YoutubeUserActivityProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/test/java/org/apache/streams/youtube/test/providers/YoutubeUserActivityProviderIT.java b/streams-contrib/streams-provider-youtube/src/test/java/org/apache/streams/youtube/test/providers/YoutubeUserActivityProviderIT.java
index a8b92cf..dd0eaab 100644
--- a/streams-contrib/streams-provider-youtube/src/test/java/org/apache/streams/youtube/test/providers/YoutubeUserActivityProviderIT.java
+++ b/streams-contrib/streams-provider-youtube/src/test/java/org/apache/streams/youtube/test/providers/YoutubeUserActivityProviderIT.java
@@ -28,43 +28,43 @@ import java.io.FileReader;
 import java.io.LineNumberReader;
 
 /**
- * Created by sblackmon on 10/13/16.
+ * Integration Test for YoutubeUserActivityProvider.
  */
 public class YoutubeUserActivityProviderIT {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeUserActivityProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeUserActivityProviderIT.class);
 
-    @Test
-    public void testYoutubeUserActivityProvider() throws Exception {
+  @Test
+  public void testYoutubeUserActivityProvider() throws Exception {
 
-        String configfile = "./target/test-classes/YoutubeUserActivityProviderIT.conf";
-        String outfile = "./target/test-classes/YoutubeUserActivityProviderIT.stdout.txt";
+    String configfile = "./target/test-classes/YoutubeUserActivityProviderIT.conf";
+    String outfile = "./target/test-classes/YoutubeUserActivityProviderIT.stdout.txt";
 
-        String[] args = new String[2];
-        args[0] = configfile;
-        args[1] = outfile;
+    String[] args = new String[2];
+    args[0] = configfile;
+    args[1] = outfile;
 
-        Thread testThread = new Thread((Runnable) () -> {
-            try {
-                YoutubeUserActivityProvider.main(args);
-            } catch( Exception e ) {
-                LOGGER.error("Test Exception!", e);
-            }
-        });
-        testThread.start();
-        testThread.join(60000);
+    Thread testThread = new Thread((Runnable) () -> {
+      try {
+        YoutubeUserActivityProvider.main(args);
+      } catch ( Exception ex ) {
+        LOGGER.error("Test Exception!", ex);
+      }
+    });
+    testThread.start();
+    testThread.join(60000);
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while (outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() >= 250);
+    assert (outCounter.getLineNumber() >= 250);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/DatumStatus.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/DatumStatus.java b/streams-core/src/main/java/org/apache/streams/core/DatumStatus.java
index b85b2d0..5846665 100644
--- a/streams-core/src/main/java/org/apache/streams/core/DatumStatus.java
+++ b/streams-core/src/main/java/org/apache/streams/core/DatumStatus.java
@@ -18,8 +18,11 @@
 
 package org.apache.streams.core;
 
+/**
+ * Status of StreamsDatum.
+ */
 public enum DatumStatus {
-    SUCCESS,
-    PARTIAL,
-    FAIL
+  SUCCESS,
+  PARTIAL,
+  FAIL
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/DatumStatusCountable.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/DatumStatusCountable.java b/streams-core/src/main/java/org/apache/streams/core/DatumStatusCountable.java
index 58518b9..c00e378 100644
--- a/streams-core/src/main/java/org/apache/streams/core/DatumStatusCountable.java
+++ b/streams-core/src/main/java/org/apache/streams/core/DatumStatusCountable.java
@@ -24,7 +24,7 @@ package org.apache.streams.core;
 @Deprecated
 public interface DatumStatusCountable {
 
-    @Deprecated
-    public DatumStatusCounter getDatumStatusCounter();
+  @Deprecated
+  public DatumStatusCounter getDatumStatusCounter();
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/DatumStatusCounter.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/DatumStatusCounter.java b/streams-core/src/main/java/org/apache/streams/core/DatumStatusCounter.java
index 2e598ce..250cc7e 100644
--- a/streams-core/src/main/java/org/apache/streams/core/DatumStatusCounter.java
+++ b/streams-core/src/main/java/org/apache/streams/core/DatumStatusCounter.java
@@ -21,72 +21,115 @@ package org.apache.streams.core;
 import java.io.Serializable;
 
 @Deprecated
-public class DatumStatusCounter implements Serializable
-{
-    private volatile int attempted = 0;
-    private volatile int success = 0;
-    private volatile int fail = 0;
-    private volatile int partial = 0;
-    private volatile int emitted = 0;
-
-    public int getAttempted()             { return this.attempted; }
-    public int getSuccess()             { return this.success; }
-    public int getFail()                { return  this.fail; }
-    public int getPartial()             { return this.partial; }
-    public int getEmitted()             { return this.emitted; }
-
-    public DatumStatusCounter() {
-    }
+public class DatumStatusCounter implements Serializable {
 
-    @Deprecated
-    public void add(DatumStatusCounter datumStatusCounter) {
-        this.attempted += datumStatusCounter.getAttempted();
-        this.success += datumStatusCounter.getSuccess();
-        this.partial = datumStatusCounter.getPartial();
-        this.fail += datumStatusCounter.getFail();
-        this.emitted += datumStatusCounter.getEmitted();
-    }
+  private volatile int attempted = 0;
+  private volatile int success = 0;
+  private volatile int fail = 0;
+  private volatile int partial = 0;
+  private volatile int emitted = 0;
 
-    @Deprecated
-    public void incrementAttempt() {
-        this.attempted += 1;
-    }
+  public int getAttempted() {
+    return this.attempted;
+  }
 
-    @Deprecated
-    public void incrementAttempt(int counter) {
-        this.attempted += counter;
-    }
+  public int getSuccess() {
+    return this.success;
+  }
 
-    @Deprecated
-    public synchronized void incrementStatus(DatumStatus workStatus) {
-        // add this to the record counter
-        switch(workStatus) {
-            case SUCCESS: this.success++; break;
-            case PARTIAL: this.partial++; break;
-            case FAIL: this.fail++; break;
-        }
-        this.emitted += 1;
-    }
+  public int getFail() {
+    return this.fail;
+  }
+
+  public int getPartial() {
+    return this.partial;
+  }
 
-    @Deprecated
-    public synchronized void incrementStatus(DatumStatus workStatus, int counter) {
-        // add this to the record counter
-        switch(workStatus) {
-            case SUCCESS: this.success += counter; break;
-            case PARTIAL: this.partial += counter; break;
-            case FAIL: this.fail += counter; break;
-        }
-        this.emitted += counter;
+  public int getEmitted() {
+    return this.emitted;
+  }
+
+  public DatumStatusCounter() {
+  }
+
+  /**
+   * accumulate partial DatumStatusCounter.
+   * @param datumStatusCounter DatumStatusCounter
+   */
+  @Deprecated
+  public void add(DatumStatusCounter datumStatusCounter) {
+    this.attempted += datumStatusCounter.getAttempted();
+    this.success += datumStatusCounter.getSuccess();
+    this.partial = datumStatusCounter.getPartial();
+    this.fail += datumStatusCounter.getFail();
+    this.emitted += datumStatusCounter.getEmitted();
+  }
+
+  @Deprecated
+  public void incrementAttempt() {
+    this.attempted += 1;
+  }
+
+  @Deprecated
+  public void incrementAttempt(int counter) {
+    this.attempted += counter;
+  }
+
+  /**
+   * increment specific DatumStatus by 1.
+   * @param workStatus DatumStatus
+   */
+  @Deprecated
+  public synchronized void incrementStatus(DatumStatus workStatus) {
+    // add this to the record counter
+    switch (workStatus) {
+      case SUCCESS:
+        this.success++;
+        break;
+      case PARTIAL:
+        this.partial++;
+        break;
+      case FAIL:
+        this.fail++;
+        break;
+      default:
+        break;
     }
+    this.emitted += 1;
+  }
 
-    @Override
-    public String toString() {
-        return "DatumStatusCounter{" +
-                "attempted=" + attempted +
-                ", success=" + success +
-                ", fail=" + fail +
-                ", partial=" + partial +
-                ", emitted=" + emitted +
-                '}';
+  /**
+   * increment specific DatumStatus by count.
+   * @param workStatus DatumStatus
+   * @param counter counter
+   */
+  @Deprecated
+  public synchronized void incrementStatus(DatumStatus workStatus, int counter) {
+    // add this to the record counter
+    switch (workStatus) {
+      case SUCCESS:
+        this.success += counter;
+        break;
+      case PARTIAL:
+        this.partial += counter;
+        break;
+      case FAIL:
+        this.fail += counter;
+        break;
+      default:
+        break;
     }
+    this.emitted += counter;
+  }
+
+  @Override
+  public String toString() {
+    return "DatumStatusCounter{"
+        + "attempted=" + attempted
+        + ", success=" + success
+        + ", fail=" + fail
+        + ", partial=" + partial
+        + ", emitted=" + emitted
+        + '}';
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/StreamBuilder.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/StreamBuilder.java b/streams-core/src/main/java/org/apache/streams/core/StreamBuilder.java
index 39bc937..1052647 100644
--- a/streams-core/src/main/java/org/apache/streams/core/StreamBuilder.java
+++ b/streams-core/src/main/java/org/apache/streams/core/StreamBuilder.java
@@ -19,6 +19,7 @@
 package org.apache.streams.core;
 
 import org.apache.streams.config.StreamsConfiguration;
+
 import org.joda.time.DateTime;
 
 import java.io.Serializable;
@@ -39,82 +40,82 @@ import java.math.BigInteger;
  */
 public interface StreamBuilder extends Serializable {
 
-    public StreamBuilder setStreamsConfiguration(StreamsConfiguration configuration);
-
-    public StreamsConfiguration getStreamsConfiguration();
-
-    /**
-     * Add a {@link org.apache.streams.core.StreamsProcessor} to the data processing stream.
-     * @param processorId unique id for this processor - must be unique across the entire stream
-     * @param processor the processor to execute
-     * @param numTasks the number of instances of this processor to run concurrently
-     * @param connectToIds the ids of the {@link org.apache.streams.core.StreamsOperation} that this process will
-     *                     receive data from.
-     * @return this
-     */
-    public StreamBuilder addStreamsProcessor(String processorId, StreamsProcessor processor, int numTasks, String... connectToIds);
-
-    /**
-     * Add a {@link org.apache.streams.core.StreamsPersistWriter} to the data processing stream.
-     * @param persistWriterId unique id for this processor - must be unique across the entire stream
-     * @param writer the writer to execute
-     * @param numTasks the number of instances of this writer to run concurrently
-     * @param connectToIds the ids of the {@link org.apache.streams.core.StreamsOperation} that this process will
-     *                     receive data from.
-     * @return this
-     */
-    public StreamBuilder addStreamsPersistWriter(String persistWriterId, StreamsPersistWriter writer, int numTasks, String... connectToIds);
-
-    /**
-     * Add a {@link org.apache.streams.core.StreamsProvider} to the data processing stream.  The provider will execute
-     * {@link org.apache.streams.core.StreamsProvider:readCurrent()} to produce data.
-     * @param streamId unique if for this provider - must be unique across the entire stream.
-     * @param provider provider to execute
-     * @return this
-     */
-    public StreamBuilder newPerpetualStream(String streamId, StreamsProvider provider);
-
-    /**
-     * Add a {@link org.apache.streams.core.StreamsProvider} to the data processing stream.  The provider will execute
-     * {@link org.apache.streams.core.StreamsProvider:readCurrent()} to produce data.
-     * @param streamId unique if for this provider - must be unique across the entire stream.
-     * @param provider provider to execute
-     * @return this
-     */
-    public StreamBuilder newReadCurrentStream(String streamId, StreamsProvider provider);
-
-    /**
-     * Add a {@link org.apache.streams.core.StreamsProvider} to the data processing stream.  The provider will execute
-     * {@link org.apache.streams.core.StreamsProvider:readNext(BigInteger)} to produce data.
-     * @param streamId unique if for this provider - must be unique across the entire stream.
-     * @param provider provider to execute
-     * @param sequence sequence to pass to {@link org.apache.streams.core.StreamsProvider:readNext(BigInteger)} method
-     * @return this
-     */
-    public StreamBuilder newReadNewStream(String streamId, StreamsProvider provider, BigInteger sequence);
-
-    /**
-     * Add a {@link org.apache.streams.core.StreamsProvider} to the data processing stream.  The provider will execute
-     * {@link org.apache.streams.core.StreamsProvider:readRange(DateTime, DateTime)} to produce data. Whether the start
-     * and end dates are inclusive or exclusive is up to the implementation.
-     * @param streamId unique if for this provider - must be unique across the entire stream.
-     * @param provider provider to execute
-     * @param start start date
-     * @param end end date
-     * @return this
-     */
-    public StreamBuilder newReadRangeStream(String streamId, StreamsProvider provider, DateTime start, DateTime end);
-
-    /**
-     * Builds the stream, and starts it or submits it based on implementation.
-     */
-    public void start();
-
-    /**
-     * Stops the streams processing.  No guarantee on a smooth shutdown. Optional method, may not be implemented in
-     * all cases.
-     */
-    public void stop();
+  public StreamBuilder setStreamsConfiguration(StreamsConfiguration configuration);
+
+  public StreamsConfiguration getStreamsConfiguration();
+
+  /**
+   * Add a {@link org.apache.streams.core.StreamsProcessor} to the data processing stream.
+   * @param processorId unique id for this processor - must be unique across the entire stream
+   * @param processor the processor to execute
+   * @param numTasks the number of instances of this processor to run concurrently
+   * @param connectToIds the ids of the {@link org.apache.streams.core.StreamsOperation} that this process will
+   *                     receive data from.
+   * @return this
+   */
+  public StreamBuilder addStreamsProcessor(String processorId, StreamsProcessor processor, int numTasks, String... connectToIds);
+
+  /**
+   * Add a {@link org.apache.streams.core.StreamsPersistWriter} to the data processing stream.
+   * @param persistWriterId unique id for this processor - must be unique across the entire stream
+   * @param writer the writer to execute
+   * @param numTasks the number of instances of this writer to run concurrently
+   * @param connectToIds the ids of the {@link org.apache.streams.core.StreamsOperation} that this process will
+   *                     receive data from.
+   * @return this
+   */
+  public StreamBuilder addStreamsPersistWriter(String persistWriterId, StreamsPersistWriter writer, int numTasks, String... connectToIds);
+
+  /**
+   * Add a {@link org.apache.streams.core.StreamsProvider} to the data processing stream.  The provider will execute
+   * {@link org.apache.streams.core.StreamsProvider#readCurrent()} to produce data.
+   * @param streamId unique id for this provider - must be unique across the entire stream.
+   * @param provider provider to execute
+   * @return this
+   */
+  public StreamBuilder newPerpetualStream(String streamId, StreamsProvider provider);
+
+  /**
+   * Add a {@link org.apache.streams.core.StreamsProvider} to the data processing stream.  The provider will execute
+   * {@link org.apache.streams.core.StreamsProvider#readCurrent()} to produce data.
+   * @param streamId unique id for this provider - must be unique across the entire stream.
+   * @param provider provider to execute
+   * @return this
+   */
+  public StreamBuilder newReadCurrentStream(String streamId, StreamsProvider provider);
+
+  /**
+   * Add a {@link org.apache.streams.core.StreamsProvider} to the data processing stream.  The provider will execute
+   * {@link org.apache.streams.core.StreamsProvider#readNext(BigInteger)} to produce data.
+   * @param streamId unique id for this provider - must be unique across the entire stream.
+   * @param provider provider to execute
+   * @param sequence sequence to pass to {@link org.apache.streams.core.StreamsProvider#readNext(BigInteger)} method
+   * @return this
+   */
+  public StreamBuilder newReadNewStream(String streamId, StreamsProvider provider, BigInteger sequence);
+
+  /**
+   * Add a {@link org.apache.streams.core.StreamsProvider} to the data processing stream.  The provider will execute
+   * {@link org.apache.streams.core.StreamsProvider#readRange(DateTime, DateTime)} to produce data. Whether the start
+   * and end dates are inclusive or exclusive is up to the implementation.
+   * @param streamId unique id for this provider - must be unique across the entire stream.
+   * @param provider provider to execute
+   * @param start start date
+   * @param end end date
+   * @return this
+   */
+  public StreamBuilder newReadRangeStream(String streamId, StreamsProvider provider, DateTime start, DateTime end);
+
+  /**
+   * Builds the stream, and starts it or submits it based on implementation.
+   */
+  public void start();
+
+  /**
+   * Stops the streams processing.  No guarantee on a smooth shutdown. Optional method, may not be implemented in
+   * all cases.
+   */
+  public void stop();
 
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/StreamHandler.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/StreamHandler.java b/streams-core/src/main/java/org/apache/streams/core/StreamHandler.java
deleted file mode 100644
index ca2e7ef..0000000
--- a/streams-core/src/main/java/org/apache/streams/core/StreamHandler.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.core;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Created by sblackmon on 1/6/14.
- */
-public class StreamHandler {
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(StreamHandler.class);
-
-    private volatile StreamState state;
-
-    public void setState(StreamState state) {
-        this.state = state;
-    }
-
-    public StreamState getState() {
-        return this.state;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/StreamState.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/StreamState.java b/streams-core/src/main/java/org/apache/streams/core/StreamState.java
deleted file mode 100644
index 0c24d29..0000000
--- a/streams-core/src/main/java/org/apache/streams/core/StreamState.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.core;
-
-public enum StreamState {
-    RUNNING,  //Stream is currently connected and running
-    STOPPED,  // Stream has been shut down and is stopped
-    CONNECTING, //Stream is attempting to connect to server
-    SHUTTING_DOWN, //Stream has initialized shutdown
-    DISCONNECTED //Stream has unintentionally lost connection
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/StreamsDatum.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/StreamsDatum.java b/streams-core/src/main/java/org/apache/streams/core/StreamsDatum.java
index 8367631..27f3d85 100644
--- a/streams-core/src/main/java/org/apache/streams/core/StreamsDatum.java
+++ b/streams-core/src/main/java/org/apache/streams/core/StreamsDatum.java
@@ -19,6 +19,7 @@
 package org.apache.streams.core;
 
 import org.apache.streams.pojo.json.Activity;
+
 import org.joda.time.DateTime;
 
 import java.io.Serializable;
@@ -28,142 +29,139 @@ import java.util.Map;
 
 public class StreamsDatum implements Serializable {
 
-    public StreamsDatum(Object document) {
-        this(document, null, null, null, new HashMap<>());
-    }
-
-    public StreamsDatum(Object document, String id) {
-        this(document, id, null, null, new HashMap<>());
-    }
-
-    public StreamsDatum(Object document, BigInteger sequenceid) {
-        this(document, null, null, sequenceid);
-    }
-
-    public StreamsDatum(Object document, DateTime timestamp) {
-        this(document, null, timestamp, null);
-    }
-
-    public StreamsDatum(Object document, DateTime timestamp, BigInteger sequenceid) {
-        this(document, null, timestamp, sequenceid);
-    }
-
-    public StreamsDatum(Object document, DateTime timestamp, Map<String, Object> metadata) {
-        this(document, null, timestamp, null, metadata);
-    }
-
-    public StreamsDatum(Object document, String id, DateTime timestamp) {
-        this(document, id, timestamp, null, new HashMap<>());
-    }
-
-    public StreamsDatum(Object document, String id, Map<String, Object> metadata) {
-        this(document, id, null, null, metadata);
-    }
-
-    public StreamsDatum(Object document, String id, BigInteger sequenceid, Map<String, Object> metadata) {
-        this(document, id, null, sequenceid, metadata);
-    }
-
-    public StreamsDatum(Object document, String id, BigInteger sequenceid) {
-        this(document, id, sequenceid, new HashMap<>());
-    }
-
-    public StreamsDatum(Object document, String id, DateTime timestamp, BigInteger sequenceid) {
-        this.document = document;
-        this.id = id;
-        this.timestamp = timestamp;
-        this.sequenceid = sequenceid;
-        this.metadata = new HashMap<>();
-    }
-
-    public StreamsDatum(Object document, String id, DateTime timestamp, BigInteger sequenceid, Map<String, Object> metadata) {
-        this.document = document;
-        this.id = id;
-        this.timestamp = timestamp;
-        this.sequenceid = sequenceid;
-        this.metadata = metadata;
-    }
-
-    public DateTime timestamp;
+  public StreamsDatum(Object document) {
+    this(document, null, null, null, new HashMap<>());
+  }
 
-    public BigInteger sequenceid;
+  public StreamsDatum(Object document, String id) {
+    this(document, id, null, null, new HashMap<>());
+  }
 
-    public Map<String, Object> metadata;
+  public StreamsDatum(Object document, BigInteger sequenceid) {
+    this(document, null, null, sequenceid);
+  }
 
-    public Object document;
+  public StreamsDatum(Object document, DateTime timestamp) {
+    this(document, null, timestamp, null);
+  }
 
-    private String id;
+  public StreamsDatum(Object document, DateTime timestamp, BigInteger sequenceid) {
+    this(document, null, timestamp, sequenceid);
+  }
+
+  public StreamsDatum(Object document, DateTime timestamp, Map<String, Object> metadata) {
+    this(document, null, timestamp, null, metadata);
+  }
 
-    public DateTime getTimestamp() {
-        return timestamp;
-    }
-
-    public void setTimestamp(DateTime timestamp) {
-        this.timestamp = timestamp;
-    }
-
-    public BigInteger getSequenceid() {
-        return sequenceid;
-    }
-
-    public void setSequenceid(BigInteger sequenceid) {
-        this.sequenceid = sequenceid;
-    }
-
-    public Map<String, Object> getMetadata() {
-        return metadata;
-    }
-
-    public void setMetadata(Map<String, Object> metadata) {
-        this.metadata = metadata;
-    }
-
-    public Object getDocument() {
-        return document;
-    }
-
-    public void setDocument(Object document) {
-        this.document = document;
-    }
-
-
-    public String getId(){
-        if(this.id == null && this.document instanceof Activity) {
-            return ((Activity)this.document).getId();
-        }
-        return id;
-    }
-
-    public void setId(String id) {
-        this.id = id;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if(o instanceof StreamsDatum) {
-            StreamsDatum that = (StreamsDatum) o;
-            if(this.document != null && this.document.equals(that.document)) {
-                return (this.timestamp != null ? this.timestamp.equals(that.timestamp) : that.timestamp == null) &&
-                        (this.sequenceid != null ? this.sequenceid.equals(that.sequenceid) : that.sequenceid == null);
-            }
-            else {
-                return that.document == null && this.document == null;
-            }
-        }
-        else {
-            return false;
-        }
-    }
-
-    @Override
-    public String toString() {
-        return "StreamsDatum{" +
-          "timestamp=" + timestamp +
-          ", sequenceid=" + sequenceid +
-          ", metadata=" + metadata +
-          ", document=" + document +
-          ", id='" + id + '\'' +
-          '}';
-    }
+  public StreamsDatum(Object document, String id, DateTime timestamp) {
+    this(document, id, timestamp, null, new HashMap<>());
+  }
+
+  public StreamsDatum(Object document, String id, Map<String, Object> metadata) {
+    this(document, id, null, null, metadata);
+  }
+
+  public StreamsDatum(Object document, String id, BigInteger sequenceid, Map<String, Object> metadata) {
+    this(document, id, null, sequenceid, metadata);
+  }
+
+  public StreamsDatum(Object document, String id, BigInteger sequenceid) {
+    this(document, id, sequenceid, new HashMap<>());
+  }
+
+  public StreamsDatum(Object document, String id, DateTime timestamp, BigInteger sequenceid) {
+    this.document = document;
+    this.id = id;
+    this.timestamp = timestamp;
+    this.sequenceid = sequenceid;
+    this.metadata = new HashMap<>();
+  }
+
+  public StreamsDatum(Object document, String id, DateTime timestamp, BigInteger sequenceid, Map<String, Object> metadata) {
+    this.document = document;
+    this.id = id;
+    this.timestamp = timestamp;
+    this.sequenceid = sequenceid;
+    this.metadata = metadata;
+  }
+
+  public DateTime timestamp;
+
+  public BigInteger sequenceid;
+
+  public Map<String, Object> metadata;
+
+  public Object document;
+
+  private String id;
+
+  public DateTime getTimestamp() {
+    return timestamp;
+  }
+
+  public void setTimestamp(DateTime timestamp) {
+    this.timestamp = timestamp;
+  }
+
+  public BigInteger getSequenceid() {
+    return sequenceid;
+  }
+
+  public void setSequenceid(BigInteger sequenceid) {
+    this.sequenceid = sequenceid;
+  }
+
+  public Map<String, Object> getMetadata() {
+    return metadata;
+  }
+
+  public void setMetadata(Map<String, Object> metadata) {
+    this.metadata = metadata;
+  }
+
+  public Object getDocument() {
+    return document;
+  }
+
+  public void setDocument(Object document) {
+    this.document = document;
+  }
+
+  public String getId() {
+    if (this.id == null && this.document instanceof Activity) {
+      return ((Activity)this.document).getId();
+    }
+    return id;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public boolean equals(Object configurationObject) {
+    if (configurationObject instanceof StreamsDatum) {
+      StreamsDatum that = (StreamsDatum) configurationObject;
+      if (this.document != null && this.document.equals(that.document)) {
+        return (this.timestamp != null ? this.timestamp.equals(that.timestamp) : that.timestamp == null)
+            && (this.sequenceid != null ? this.sequenceid.equals(that.sequenceid) : that.sequenceid == null);
+      } else {
+        return that.document == null && this.document == null;
+      }
+    } else {
+      return false;
+    }
+  }
+
+  @Override
+  public String toString() {
+    return "StreamsDatum{"
+        + "timestamp=" + timestamp
+        + ", sequenceid=" + sequenceid
+        + ", metadata=" + metadata
+        + ", document=" + document
+        + ", id='" + id + '\''
+        + '}';
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/StreamsFilter.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/StreamsFilter.java b/streams-core/src/main/java/org/apache/streams/core/StreamsFilter.java
deleted file mode 100644
index 929b26f..0000000
--- a/streams-core/src/main/java/org/apache/streams/core/StreamsFilter.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.core;
-
-import java.util.Queue;
-
-public interface StreamsFilter {
-
-    void start();
-    void stop();
-
-    public void setProcessorInputQueue(Queue<StreamsDatum> inputQueue);
-    public Queue<StreamsDatum> getProcessorInputQueue();
-
-    public void setProcessorOutputQueue(Queue<StreamsDatum> outputQueue);
-    public Queue<StreamsDatum> getProcessorOutputQueue();
-
-    public boolean filter(StreamsDatum entry);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/StreamsOperation.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/StreamsOperation.java b/streams-core/src/main/java/org/apache/streams/core/StreamsOperation.java
index f5422a5..490f454 100644
--- a/streams-core/src/main/java/org/apache/streams/core/StreamsOperation.java
+++ b/streams-core/src/main/java/org/apache/streams/core/StreamsOperation.java
@@ -21,26 +21,26 @@ package org.apache.streams.core;
 import java.io.Serializable;
 
 /**
- *
+ * Each step in a pipeline is a StreamsOperation.
  */
 public interface StreamsOperation extends Serializable {
 
-    /**
-     * Each operation must publish an identifier.
-     */
-    public String getId();
+  /**
+   * Each operation must publish an identifier.
+   */
+  public String getId();
 
-    /**
-     * This method will be called after initialization/serialization. Initialize any non-serializable objects here.
-     * @param configurationObject Any object to help intialize the operation. ie. Map, JobContext, Properties, etc. The type
-     *                            will be based on where the operation is being run (ie. hadoop, storm, locally, etc.)
-     */
-    public void prepare(Object configurationObject);
+  /**
+   * This method will be called after initialization/serialization. Initialize any non-serializable objects here.
+   * @param configurationObject Any object to help initialize the operation. ie. Map, JobContext, Properties, etc. The type
+   *                            will be based on where the operation is being run (ie. hadoop, storm, locally, etc.)
+   */
+  public void prepare(Object configurationObject);
 
-    /**
-     * No guarantee that this method will ever be called.  But upon shutdown of the stream, an attempt to call this method
-     * will be made.
-     * Use this method to terminate connections, etc.
-     */
-    public void cleanUp();
+  /**
+   * No guarantee that this method will ever be called.  But upon shutdown of the stream, an attempt to call this method
+   * will be made.
+   * Use this method to terminate connections, etc.
+   */
+  public void cleanUp();
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/StreamsPersistReader.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/StreamsPersistReader.java b/streams-core/src/main/java/org/apache/streams/core/StreamsPersistReader.java
index 1a6b0d8..64063ac 100644
--- a/streams-core/src/main/java/org/apache/streams/core/StreamsPersistReader.java
+++ b/streams-core/src/main/java/org/apache/streams/core/StreamsPersistReader.java
@@ -23,16 +23,14 @@ import org.joda.time.DateTime;
 import java.math.BigInteger;
 
 /**
- *
  * Currently a duplicate interface.  Has exact same methods as StreamsProvider.
- * Future work should make this interface necessary I'm told.
- *
  */
 public interface StreamsPersistReader extends StreamsProvider {
 
-    StreamsResultSet readAll();
-    StreamsResultSet readNew(BigInteger sequence);
-    StreamsResultSet readRange(DateTime start, DateTime end);
+  StreamsResultSet readAll();
+
+  StreamsResultSet readNew(BigInteger sequence);
 
+  StreamsResultSet readRange(DateTime start, DateTime end);
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/StreamsPersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/StreamsPersistWriter.java b/streams-core/src/main/java/org/apache/streams/core/StreamsPersistWriter.java
index 1af62e7..59797e4 100644
--- a/streams-core/src/main/java/org/apache/streams/core/StreamsPersistWriter.java
+++ b/streams-core/src/main/java/org/apache/streams/core/StreamsPersistWriter.java
@@ -18,12 +18,15 @@
 
 package org.apache.streams.core;
 
-public interface StreamsPersistWriter extends StreamsOperation{
+/**
+ * StreamsOperation for writing data out of a pipeline.
+ */
+public interface StreamsPersistWriter extends StreamsOperation {
 
-    /**
-     * Persist the StreamsDatum to the corresponding data store.
-     * @param entry to be stored.
-     */
-    void write( StreamsDatum entry );
+  /**
+   * Persist the StreamsDatum to the corresponding data store.
+   * @param entry to be stored.
+   */
+  void write( StreamsDatum entry );
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/StreamsProcessor.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/StreamsProcessor.java b/streams-core/src/main/java/org/apache/streams/core/StreamsProcessor.java
index b63e2d9..25e1a07 100644
--- a/streams-core/src/main/java/org/apache/streams/core/StreamsProcessor.java
+++ b/streams-core/src/main/java/org/apache/streams/core/StreamsProcessor.java
@@ -20,14 +20,14 @@ package org.apache.streams.core;
 
 import java.util.List;
 
-public interface StreamsProcessor extends StreamsOperation{
+public interface StreamsProcessor extends StreamsOperation {
 
-    /**
-     * Process/Analyze the {@link org.apache.streams.core.StreamsDatum} and return the the StreamsDatums that will
-     * passed to every down stream operation that reads from this processor.
-     * @param entry StreamsDatum to be process
-     * @return resulting StreamDatums from process. Should never be null or contain null object.  Empty list OK.
-     */
-    List<StreamsDatum> process( StreamsDatum entry );
+  /**
+   * Process/Analyze the {@link org.apache.streams.core.StreamsDatum} and return the StreamsDatums that will
+   * be passed to every downstream operation that reads from this processor.
+   * @param entry StreamsDatum to be processed
+   * @return resulting StreamDatums from processing. Should never be null or contain null object.  Empty list OK.
+   */
+  List<StreamsDatum> process( StreamsDatum entry );
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/StreamsProvider.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/StreamsProvider.java b/streams-core/src/main/java/org/apache/streams/core/StreamsProvider.java
index 7c27e34..2547343 100644
--- a/streams-core/src/main/java/org/apache/streams/core/StreamsProvider.java
+++ b/streams-core/src/main/java/org/apache/streams/core/StreamsProvider.java
@@ -18,45 +18,46 @@
 
 package org.apache.streams.core;
 
-import java.math.BigInteger;
 import org.joda.time.DateTime;
 
+import java.math.BigInteger;
+
 /**
  * A StreamsProvider represents the entry point into the Streams pipeline.  Providers are responsible for inserting
  * data into the pipeline in discrete result sets.
  */
 public interface StreamsProvider extends StreamsOperation {
 
-    /**
-     * Start the operation of the stream
-     */
-    void startStream();
-
-    /**
-     * Read the current items available from the provider
-     * @return a non-null {@link org.apache.streams.core.StreamsResultSet}
-     */
-    StreamsResultSet readCurrent();
-
-    /**
-     * TODO: Define how this operates or eliminate
-     * @param sequence
-     * @return {@link StreamsResultSet}
-     */
-    StreamsResultSet readNew(BigInteger sequence);
-
-    /**
-     * TODO: Define how this operates or eliminate
-     * @param start
-     * @param end
-     * @return {@link StreamsResultSet}
-     */
-    StreamsResultSet readRange(DateTime start, DateTime end);
-
-    /**
-     * Flag to indicate whether the provider is producing data
-     * @return true if the processor is actively awaiting or producing data.  False otherwise.
-     */
-    boolean isRunning();
+  /**
+   * Start the operation of the stream.
+   */
+  void startStream();
+
+  /**
+   * Read the current items available from the provider.
+   * @return a non-null {@link org.apache.streams.core.StreamsResultSet}
+   */
+  StreamsResultSet readCurrent();
+
+  /**
+   * Read data with sequenceId greater than sequence.
+   * @param sequence BigInteger sequence
+   * @return {@link StreamsResultSet}
+   */
+  StreamsResultSet readNew(BigInteger sequence);
+
+  /**
+   * Read data with event time between start DateTime and end DateTime.
+   * @param start start DateTime
+   * @param end end DateTime
+   * @return {@link StreamsResultSet}
+   */
+  StreamsResultSet readRange(DateTime start, DateTime end);
+
+  /**
+   * Flag to indicate whether the provider is still producing data.
+   * @return true if the processor is actively awaiting or producing data.  False otherwise.
+   */
+  boolean isRunning();
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/StreamsResultSet.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/StreamsResultSet.java b/streams-core/src/main/java/org/apache/streams/core/StreamsResultSet.java
index 1e1cbe2..d64f27e 100644
--- a/streams-core/src/main/java/org/apache/streams/core/StreamsResultSet.java
+++ b/streams-core/src/main/java/org/apache/streams/core/StreamsResultSet.java
@@ -21,40 +21,43 @@ package org.apache.streams.core;
 import java.util.Iterator;
 import java.util.Queue;
 
+/**
+ * StreamsResultSet is a wrapper for an Iterator around a set of StreamsDatum.
+ */
 public class StreamsResultSet implements Iterable<StreamsDatum> {
 
-    Queue<StreamsDatum> queue;
+  Queue<StreamsDatum> queue;
 
-    DatumStatusCounter counter;
+  DatumStatusCounter counter;
 
-    public StreamsResultSet(Queue<StreamsDatum> queue) {
-        this.queue = queue;
-    }
+  public StreamsResultSet(Queue<StreamsDatum> queue) {
+    this.queue = queue;
+  }
 
 
-    @Override
-    public Iterator<StreamsDatum> iterator() {
-        return queue.iterator();
-    }
+  @Override
+  public Iterator<StreamsDatum> iterator() {
+    return queue.iterator();
+  }
 
-    public int size() {
-        return queue.size();
-    }
+  public int size() {
+    return queue.size();
+  }
 
-    public Queue<StreamsDatum> getQueue() {
-        return queue;
-    }
+  public Queue<StreamsDatum> getQueue() {
+    return queue;
+  }
 
-    public void setQueue(Queue<StreamsDatum> queue) {
-        this.queue = queue;
-    }
+  public void setQueue(Queue<StreamsDatum> queue) {
+    this.queue = queue;
+  }
 
-    public DatumStatusCounter getCounter() {
-        return counter;
-    }
+  public DatumStatusCounter getCounter() {
+    return counter;
+  }
 
-    public void setCounter(DatumStatusCounter counter) {
-        this.counter = counter;
-    }
+  public void setCounter(DatumStatusCounter counter) {
+    this.counter = counter;
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-core/src/main/java/org/apache/streams/core/util/DatumUtils.java
----------------------------------------------------------------------
diff --git a/streams-core/src/main/java/org/apache/streams/core/util/DatumUtils.java b/streams-core/src/main/java/org/apache/streams/core/util/DatumUtils.java
index a229413..7d80098 100644
--- a/streams-core/src/main/java/org/apache/streams/core/util/DatumUtils.java
+++ b/streams-core/src/main/java/org/apache/streams/core/util/DatumUtils.java
@@ -21,39 +21,40 @@ package org.apache.streams.core.util;
 
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsOperation;
+
 import org.joda.time.DateTime;
 
 import java.util.HashMap;
 import java.util.Map;
 
 /**
- * Provides common utilities for managing and manipulating StreamsDatums
+ * Provides common utilities for managing and manipulating StreamsDatums.
  */
 public class DatumUtils {
 
-    /**
-     * Adds an error occurred during a StreamsOperation step to the StreamsDatum's metadata.  By convention, errors are
-     * placed in the metadata under the "errors" and are organized by class simple name where the failure occurred.
-     *
-     * @param datum the datum on which the operation step errored
-     * @param e the error encountered
-     * @param operationClass the class of the operation
-     */
-    @SuppressWarnings("all")
-    public static void addErrorToMetadata(StreamsDatum datum, Throwable e, Class<? extends StreamsOperation> operationClass) {
-        if(!datum.getMetadata().containsKey("errors")) {
-            datum.getMetadata().put("errors", new HashMap<String, Throwable>());
-        }
-        Map<String, Throwable> errors = (Map)datum.getMetadata().get("errors");
-        errors.put(operationClass.getCanonicalName(), e);
+  /**
+   * Adds an error that occurred during a StreamsOperation step to the StreamsDatum's metadata.  By convention, errors are
+   * placed in the metadata under the "errors" key and are organized by class simple name where the failure occurred.
+   *
+   * @param datum the datum on which the operation step errored
+   * @param throwable the throwable encountered
+   * @param operationClass the class of the operation
+   */
+  @SuppressWarnings("all")
+  public static void addErrorToMetadata(StreamsDatum datum, Throwable throwable, Class<? extends StreamsOperation> operationClass) {
+    if (!datum.getMetadata().containsKey("errors")) {
+      datum.getMetadata().put("errors", new HashMap<String, Throwable>());
     }
+    Map<String, Throwable> errors = (Map)datum.getMetadata().get("errors");
+    errors.put(operationClass.getCanonicalName(), throwable);
+  }
 
-    public static StreamsDatum cloneDatum(StreamsDatum datum) {
-        StreamsDatum clone = new StreamsDatum(datum.getDocument());
-        clone.setId(datum.getId() == null ? null : datum.getId());
-        clone.setTimestamp(datum.getTimestamp() == null ? null : new DateTime(datum.getTimestamp()));
-        clone.setSequenceid(datum.getSequenceid() == null ? null : datum.getSequenceid());
-        clone.setMetadata(datum.getMetadata() == null ? null : new HashMap<>(datum.getMetadata()));
-        return clone;
-    }
+  public static StreamsDatum cloneDatum(StreamsDatum datum) {
+    StreamsDatum clone = new StreamsDatum(datum.getDocument());
+    clone.setId(datum.getId() == null ? null : datum.getId());
+    clone.setTimestamp(datum.getTimestamp() == null ? null : new DateTime(datum.getTimestamp()));
+    clone.setSequenceid(datum.getSequenceid() == null ? null : datum.getSequenceid());
+    clone.setMetadata(datum.getMetadata() == null ? null : new HashMap<>(datum.getMetadata()));
+    return clone;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/main/java/org/apache/streams/jackson/DatumStatusCounterDeserializer.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/main/java/org/apache/streams/jackson/DatumStatusCounterDeserializer.java b/streams-monitoring/src/main/java/org/apache/streams/jackson/DatumStatusCounterDeserializer.java
index 8bfa28b..f13a44f 100644
--- a/streams-monitoring/src/main/java/org/apache/streams/jackson/DatumStatusCounterDeserializer.java
+++ b/streams-monitoring/src/main/java/org/apache/streams/jackson/DatumStatusCounterDeserializer.java
@@ -15,62 +15,67 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.jackson;
 
+import org.apache.streams.pojo.json.DatumStatusCounterBroadcast;
+
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.DeserializationContext;
 import com.fasterxml.jackson.databind.JsonDeserializer;
 import com.fasterxml.jackson.databind.JsonNode;
-import org.apache.streams.pojo.json.DatumStatusCounterBroadcast;
 import org.slf4j.Logger;
 
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.util.Arrays;
 import javax.management.MBeanAttributeInfo;
 import javax.management.MBeanInfo;
 import javax.management.MBeanServer;
 import javax.management.ObjectName;
-import java.io.IOException;
-import java.lang.management.ManagementFactory;
-import java.util.Arrays;
 
 public class DatumStatusCounterDeserializer extends JsonDeserializer<DatumStatusCounterBroadcast> {
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(DatumStatusCounterDeserializer.class);
 
-    public DatumStatusCounterDeserializer() {
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(DatumStatusCounterDeserializer.class);
 
-    }
+  public DatumStatusCounterDeserializer() {
 
-    @Override
-    public DatumStatusCounterBroadcast deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
-        try {
-            MBeanServer server = ManagementFactory.getPlatformMBeanServer();
+  }
 
-            DatumStatusCounterBroadcast datumStatusCounterBroadcast = new DatumStatusCounterBroadcast();
-            JsonNode attributes = jsonParser.getCodec().readTree(jsonParser);
+  @Override
+  public DatumStatusCounterBroadcast deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
+    try {
+      MBeanServer server = ManagementFactory.getPlatformMBeanServer();
 
-            ObjectName name = new ObjectName(attributes.get("canonicalName").asText());
-            MBeanInfo info = server.getMBeanInfo(name);
-            datumStatusCounterBroadcast.setName(name.toString());
+      DatumStatusCounterBroadcast datumStatusCounterBroadcast = new DatumStatusCounterBroadcast();
+      JsonNode attributes = jsonParser.getCodec().readTree(jsonParser);
 
-            for (MBeanAttributeInfo attribute : Arrays.asList(info.getAttributes())) {
-                try {
-                    switch(attribute.getName()) {
-                        case "Failed":
-                            datumStatusCounterBroadcast.setFailed((boolean) server.getAttribute(name, attribute.getName()));
-                            break;
-                        case "Passed":
-                            datumStatusCounterBroadcast.setPassed((boolean) server.getAttribute(name, attribute.getName()));
-                            break;
-                    }
-                } catch (Exception e) {
-                    LOGGER.error("Exception trying to deserialize DatumStatusCounterBroadcast object: {}", e);
-                }
-            }
+      ObjectName name = new ObjectName(attributes.get("canonicalName").asText());
+      MBeanInfo info = server.getMBeanInfo(name);
+      datumStatusCounterBroadcast.setName(name.toString());
 
-            return datumStatusCounterBroadcast;
-        } catch (Exception e) {
-            LOGGER.error("Exception trying to deserialize DatumStatusCounterBroadcast object: {}", e);
-            return null;
+      for (MBeanAttributeInfo attribute : Arrays.asList(info.getAttributes())) {
+        try {
+          switch (attribute.getName()) {
+            case "Failed":
+              datumStatusCounterBroadcast.setFailed((boolean) server.getAttribute(name, attribute.getName()));
+              break;
+            case "Passed":
+              datumStatusCounterBroadcast.setPassed((boolean) server.getAttribute(name, attribute.getName()));
+              break;
+            default:
+              break;
+          }
+        } catch (Exception ex) {
+          LOGGER.error("Exception trying to deserialize DatumStatusCounterBroadcast object: {}", ex);
         }
+      }
+
+      return datumStatusCounterBroadcast;
+    } catch (Exception ex) {
+      LOGGER.error("Exception trying to deserialize DatumStatusCounterBroadcast object: {}", ex);
+      return null;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/main/java/org/apache/streams/jackson/MemoryUsageDeserializer.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/main/java/org/apache/streams/jackson/MemoryUsageDeserializer.java b/streams-monitoring/src/main/java/org/apache/streams/jackson/MemoryUsageDeserializer.java
index 43c9239..8259340 100644
--- a/streams-monitoring/src/main/java/org/apache/streams/jackson/MemoryUsageDeserializer.java
+++ b/streams-monitoring/src/main/java/org/apache/streams/jackson/MemoryUsageDeserializer.java
@@ -15,65 +15,70 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.jackson;
 
+import org.apache.streams.pojo.json.MemoryUsageBroadcast;
+
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.DeserializationContext;
 import com.fasterxml.jackson.databind.JsonDeserializer;
 import com.fasterxml.jackson.databind.JsonNode;
-import org.apache.streams.pojo.json.MemoryUsageBroadcast;
 import org.slf4j.Logger;
 
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.util.Arrays;
 import javax.management.MBeanAttributeInfo;
 import javax.management.MBeanInfo;
 import javax.management.MBeanServer;
 import javax.management.ObjectName;
 import javax.management.openmbean.CompositeDataSupport;
-import java.io.IOException;
-import java.lang.management.ManagementFactory;
-import java.util.Arrays;
 
 public class MemoryUsageDeserializer extends JsonDeserializer<MemoryUsageBroadcast> {
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(MemoryUsageDeserializer.class);
 
-    public MemoryUsageDeserializer() {
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(MemoryUsageDeserializer.class);
 
-    }
+  public MemoryUsageDeserializer() {
 
-    @Override
-    public MemoryUsageBroadcast deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
-        try {
-            MBeanServer server = ManagementFactory.getPlatformMBeanServer();
+  }
 
-            MemoryUsageBroadcast memoryUsageBroadcast = new MemoryUsageBroadcast();
-            JsonNode attributes = jsonParser.getCodec().readTree(jsonParser);
+  @Override
+  public MemoryUsageBroadcast deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
+    try {
+      MBeanServer server = ManagementFactory.getPlatformMBeanServer();
 
-            ObjectName name = new ObjectName(attributes.get("canonicalName").asText());
-            MBeanInfo info = server.getMBeanInfo(name);
-            memoryUsageBroadcast.setName(name.toString());
+      MemoryUsageBroadcast memoryUsageBroadcast = new MemoryUsageBroadcast();
+      JsonNode attributes = jsonParser.getCodec().readTree(jsonParser);
 
-            for(MBeanAttributeInfo attribute : Arrays.asList(info.getAttributes())) {
-                switch(attribute.getName()) {
-                    case "Verbose":
-                        memoryUsageBroadcast.setVerbose((boolean) server.getAttribute(name, attribute.getName()));
-                        break;
-                    case "ObjectPendingFinalizationCount":
-                        memoryUsageBroadcast.setObjectPendingFinalizationCount(Long.parseLong(server.getAttribute(name, attribute.getName()).toString()));
-                        break;
-                    case "HeapMemoryUsage":
-                        memoryUsageBroadcast.setHeapMemoryUsage((Long) ((CompositeDataSupport)server.getAttribute(name, attribute.getName())).get("used"));
-                        break;
-                    case "NonHeapMemoryUsage":
-                        memoryUsageBroadcast.setNonHeapMemoryUsage((Long) ((CompositeDataSupport)server.getAttribute(name, attribute.getName())).get("used"));
-                        break;
-                }
-            }
+      ObjectName name = new ObjectName(attributes.get("canonicalName").asText());
+      MBeanInfo info = server.getMBeanInfo(name);
+      memoryUsageBroadcast.setName(name.toString());
 
-            return memoryUsageBroadcast;
-        } catch (Exception e) {
-            LOGGER.error("Exception trying to deserialize MemoryUsageDeserializer object: {}", e);
-            return null;
+      for (MBeanAttributeInfo attribute : Arrays.asList(info.getAttributes())) {
+        switch (attribute.getName()) {
+          case "Verbose":
+            memoryUsageBroadcast.setVerbose((boolean) server.getAttribute(name, attribute.getName()));
+            break;
+          case "ObjectPendingFinalizationCount":
+            memoryUsageBroadcast.setObjectPendingFinalizationCount(Long.parseLong(server.getAttribute(name, attribute.getName()).toString()));
+            break;
+          case "HeapMemoryUsage":
+            memoryUsageBroadcast.setHeapMemoryUsage((Long) ((CompositeDataSupport)server.getAttribute(name, attribute.getName())).get("used"));
+            break;
+          case "NonHeapMemoryUsage":
+            memoryUsageBroadcast.setNonHeapMemoryUsage((Long) ((CompositeDataSupport)server.getAttribute(name, attribute.getName())).get("used"));
+            break;
+          default:
+            break;
         }
+      }
+
+      return memoryUsageBroadcast;
+    } catch (Exception ex) {
+      LOGGER.error("Exception trying to deserialize MemoryUsageDeserializer object: {}", ex);
+      return null;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-monitoring/src/main/java/org/apache/streams/jackson/StreamsTaskCounterDeserializer.java
----------------------------------------------------------------------
diff --git a/streams-monitoring/src/main/java/org/apache/streams/jackson/StreamsTaskCounterDeserializer.java b/streams-monitoring/src/main/java/org/apache/streams/jackson/StreamsTaskCounterDeserializer.java
index 8b65bf3..e5f5dcb 100644
--- a/streams-monitoring/src/main/java/org/apache/streams/jackson/StreamsTaskCounterDeserializer.java
+++ b/streams-monitoring/src/main/java/org/apache/streams/jackson/StreamsTaskCounterDeserializer.java
@@ -15,74 +15,82 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.jackson;
 
+import org.apache.streams.pojo.json.StreamsTaskCounterBroadcast;
+
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.DeserializationContext;
 import com.fasterxml.jackson.databind.JsonDeserializer;
 import com.fasterxml.jackson.databind.JsonNode;
-import org.apache.streams.pojo.json.StreamsTaskCounterBroadcast;
 import org.slf4j.Logger;
 
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+import java.util.Arrays;
 import javax.management.MBeanAttributeInfo;
 import javax.management.MBeanInfo;
 import javax.management.MBeanServer;
 import javax.management.ObjectName;
-import java.io.IOException;
-import java.lang.management.ManagementFactory;
-import java.util.Arrays;
 
+/**
+ * StreamsTaskCounterDeserializer: a JsonDeserializer for StreamsTaskCounterBroadcast.
+ */
 public class StreamsTaskCounterDeserializer extends JsonDeserializer<StreamsTaskCounterBroadcast> {
-    private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(StreamsTaskCounterDeserializer.class);
 
-    public StreamsTaskCounterDeserializer() {
+  private static final Logger LOGGER = org.slf4j.LoggerFactory.getLogger(StreamsTaskCounterDeserializer.class);
 
-    }
+  public StreamsTaskCounterDeserializer() {
 
-    @Override
-    public StreamsTaskCounterBroadcast deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
-        try {
-            MBeanServer server = ManagementFactory.getPlatformMBeanServer();
+  }
 
-            StreamsTaskCounterBroadcast streamsTaskCounterBroadcast = new StreamsTaskCounterBroadcast();
-            JsonNode attributes = jsonParser.getCodec().readTree(jsonParser);
+  @Override
+  public StreamsTaskCounterBroadcast deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
+    try {
+      MBeanServer server = ManagementFactory.getPlatformMBeanServer();
 
-            ObjectName name = new ObjectName(attributes.get("canonicalName").asText());
-            MBeanInfo info = server.getMBeanInfo(name);
-            streamsTaskCounterBroadcast.setName(name.toString());
+      StreamsTaskCounterBroadcast streamsTaskCounterBroadcast = new StreamsTaskCounterBroadcast();
+      JsonNode attributes = jsonParser.getCodec().readTree(jsonParser);
 
-            for (MBeanAttributeInfo attribute : Arrays.asList(info.getAttributes())) {
-                try {
-                    switch (attribute.getName()) {
-                        case "ErrorRate":
-                            streamsTaskCounterBroadcast.setErrorRate((double) server.getAttribute(name, attribute.getName()));
-                            break;
-                        case "NumEmitted":
-                            streamsTaskCounterBroadcast.setNumEmitted((long) server.getAttribute(name, attribute.getName()));
-                            break;
-                        case "NumReceived":
-                            streamsTaskCounterBroadcast.setNumReceived((long) server.getAttribute(name, attribute.getName()));
-                            break;
-                        case "NumUnhandledErrors":
-                            streamsTaskCounterBroadcast.setNumUnhandledErrors((long) server.getAttribute(name, attribute.getName()));
-                            break;
-                        case "AvgTime":
-                            streamsTaskCounterBroadcast.setAvgTime((double) server.getAttribute(name, attribute.getName()));
-                            break;
-                        case "MaxTime":
-                            streamsTaskCounterBroadcast.setMaxTime((long) server.getAttribute(name, attribute.getName()));
-                            break;
-                    }
-                } catch (Exception e) {
-                    LOGGER.error("Exception while trying to deserialize StreamsTaskCounterBroadcast object: {}", e);
-                }
-            }
+      ObjectName name = new ObjectName(attributes.get("canonicalName").asText());
+      MBeanInfo info = server.getMBeanInfo(name);
+      streamsTaskCounterBroadcast.setName(name.toString());
 
-            return streamsTaskCounterBroadcast;
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to deserialize StreamsTaskCounterBroadcast object: {}", e);
-            return null;
+      for (MBeanAttributeInfo attribute : Arrays.asList(info.getAttributes())) {
+        try {
+          switch (attribute.getName()) {
+            case "ErrorRate":
+              streamsTaskCounterBroadcast.setErrorRate((double) server.getAttribute(name, attribute.getName()));
+              break;
+            case "NumEmitted":
+              streamsTaskCounterBroadcast.setNumEmitted((long) server.getAttribute(name, attribute.getName()));
+              break;
+            case "NumReceived":
+              streamsTaskCounterBroadcast.setNumReceived((long) server.getAttribute(name, attribute.getName()));
+              break;
+            case "NumUnhandledErrors":
+              streamsTaskCounterBroadcast.setNumUnhandledErrors((long) server.getAttribute(name, attribute.getName()));
+              break;
+            case "AvgTime":
+              streamsTaskCounterBroadcast.setAvgTime((double) server.getAttribute(name, attribute.getName()));
+              break;
+            case "MaxTime":
+              streamsTaskCounterBroadcast.setMaxTime((long) server.getAttribute(name, attribute.getName()));
+              break;
+            default:
+              break;
+          }
+        } catch (Exception ex) {
+          LOGGER.error("Exception while trying to deserialize StreamsTaskCounterBroadcast object: {}", ex);
         }
+      }
+
+      return streamsTaskCounterBroadcast;
+    } catch (Exception ex) {
+      LOGGER.error("Exception while trying to deserialize StreamsTaskCounterBroadcast object: {}", ex);
+      return null;
     }
+  }
 }


[24/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssStreamProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssStreamProvider.java b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssStreamProvider.java
index d7dc918..9de1863 100644
--- a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssStreamProvider.java
+++ b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssStreamProvider.java
@@ -18,14 +18,6 @@
 
 package org.apache.streams.rss.provider;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.Uninterruptibles;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigParseOptions;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.config.StreamsConfigurator;
@@ -37,6 +29,15 @@ import org.apache.streams.rss.FeedDetails;
 import org.apache.streams.rss.RssStreamConfiguration;
 import org.apache.streams.rss.provider.perpetual.RssFeedScheduler;
 import org.apache.streams.util.ComponentUtils;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.Uninterruptibles;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -61,173 +62,159 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 /**
  * RSS {@link org.apache.streams.core.StreamsProvider} that provides content from rss feeds in boilerpipe format
- *
- *  To use from command line:
- *
- *  Supply configuration similar to src/test/resources/rss.conf
- *
- *  Launch using:
- *
- *  mvn exec:java -Dexec.mainClass=org.apache.streams.rss.provider.RssStreamProvider -Dexec.args="rss.conf articles.json"
  */
 public class RssStreamProvider implements StreamsProvider {
 
-    public static final String STREAMS_ID = "RssStreamProvider";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(RssStreamProvider.class);
-
-    private final static int MAX_SIZE = 1000;
-
-    private RssStreamConfiguration config;
-    private boolean perpetual;
-    private ExecutorService executor;
-    private BlockingQueue<StreamsDatum> dataQueue;
-    private AtomicBoolean isComplete;
-
-    @VisibleForTesting
-    protected RssFeedScheduler scheduler;
-
-    public RssStreamProvider() {
-        this(new ComponentConfigurator<>(RssStreamConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("rss")), false);
-    }
-
-    public RssStreamProvider(boolean perpetual) {
-        this(new ComponentConfigurator<>(RssStreamConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("rss")), perpetual);
-    }
-
-    public RssStreamProvider(RssStreamConfiguration config) {
-        this(config, false);
-    }
-
-    public RssStreamProvider(RssStreamConfiguration config, boolean perpetual) {
-        this.perpetual = perpetual;
-        this.config = config;
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    public void setConfig(RssStreamConfiguration config) {
-        this.config = config;
-    }
-
-    public void setRssFeeds(Set<String> urlFeeds) {
-    }
-
-    public void setRssFeeds(Map<String, Long> feeds) {
-        if(this.config == null) {
-            this.config = new RssStreamConfiguration();
-        }
-        List<FeedDetails> feedDetails = new ArrayList<>();
-        for(String feed : feeds.keySet()) {
-            Long delay = feeds.get(feed);
-            FeedDetails detail = new FeedDetails();
-            detail.setUrl(feed);
-            detail.setPollIntervalMillis(delay);
-            feedDetails.add(detail);
-        }
-        this.config.setFeeds(feedDetails);
-    }
-
-    @Override
-    public void startStream() {
-        LOGGER.trace("Starting Rss Scheduler");
-        this.executor.submit(this.scheduler);
-    }
-
-    @Override
-    public StreamsResultSet readCurrent() {
-        Queue<StreamsDatum> batch = new ConcurrentLinkedQueue<>();
-        int batchSize = 0;
-        while(!this.dataQueue.isEmpty() && batchSize < MAX_SIZE) {
-            StreamsDatum datum = ComponentUtils.pollWhileNotEmpty(this.dataQueue);
-            if(datum != null) {
-                ++batchSize;
-                batch.add(datum);
-            }
+  public static final String STREAMS_ID = "RssStreamProvider";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(RssStreamProvider.class);
+
+  private static final int MAX_SIZE = 1000;
+
+  private RssStreamConfiguration config;
+  private boolean perpetual;
+  private ExecutorService executor;
+  private BlockingQueue<StreamsDatum> dataQueue;
+  private AtomicBoolean isComplete;
+
+  @VisibleForTesting
+  protected RssFeedScheduler scheduler;
+
+  public RssStreamProvider() {
+    this(new ComponentConfigurator<>(RssStreamConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("rss")), false);
+  }
+
+  public RssStreamProvider(boolean perpetual) {
+    this(new ComponentConfigurator<>(RssStreamConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("rss")), perpetual);
+  }
+
+  public RssStreamProvider(RssStreamConfiguration config) {
+    this(config, false);
+  }
+
+  public RssStreamProvider(RssStreamConfiguration config, boolean perpetual) {
+    this.perpetual = perpetual;
+    this.config = config;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void startStream() {
+    LOGGER.trace("Starting Rss Scheduler");
+    this.executor.submit(this.scheduler);
+  }
+
+  @Override
+  public StreamsResultSet readCurrent() {
+    Queue<StreamsDatum> batch = new ConcurrentLinkedQueue<>();
+    int batchSize = 0;
+    while (!this.dataQueue.isEmpty() && batchSize < MAX_SIZE) {
+      StreamsDatum datum = ComponentUtils.pollWhileNotEmpty(this.dataQueue);
+      if (datum != null) {
+        ++batchSize;
+        batch.add(datum);
+      }
+    }
+    this.isComplete.set(this.scheduler.isComplete() && batch.isEmpty() && this.dataQueue.isEmpty());
+    return new StreamsResultSet(batch);
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return null;
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return null;
+  }
+
+  @Override
+  public boolean isRunning() {
+    return !this.isComplete.get();
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    this.executor = new ThreadPoolExecutor(1, 4, 15L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
+    this.dataQueue = new LinkedBlockingQueue<>();
+    this.scheduler = getScheduler(this.dataQueue);
+    this.isComplete = new AtomicBoolean(false);
+    int consecutiveEmptyReads = 0;
+  }
+
+  @VisibleForTesting
+  protected RssFeedScheduler getScheduler(BlockingQueue<StreamsDatum> queue) {
+    if (this.perpetual) {
+      return new RssFeedScheduler(this.executor, this.config.getFeeds(), queue);
+    } else {
+      return new RssFeedScheduler(this.executor, this.config.getFeeds(), queue, 0);
+    }
+  }
+
+  @Override
+  public void cleanUp() {
+    this.scheduler.stop();
+    ComponentUtils.shutdownExecutor(this.executor, 10, 10);
+  }
+
+  /**
+   * To use from command line:
+   *
+   * <p/>
+   * Supply configuration similar to src/test/resources/rss.conf
+   *
+   * <p/>
+   * Launch using:
+   *
+   * <p/>
+   * mvn exec:java -Dexec.mainClass=org.apache.streams.rss.provider.RssStreamProvider -Dexec.args="rss.conf articles.json"
+   * @param args args
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    RssStreamConfiguration config = new ComponentConfigurator<>(RssStreamConfiguration.class).detectConfiguration(typesafe, "rss");
+    RssStreamProvider provider = new RssStreamProvider(config);
+
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      for (StreamsDatum datum : provider.readCurrent()) {
+        String json;
+        try {
+          json = mapper.writeValueAsString(datum.getDocument());
+          outStream.println(json);
+        } catch (JsonProcessingException ex) {
+          System.err.println(ex.getMessage());
         }
-        this.isComplete.set(this.scheduler.isComplete() && batch.isEmpty() && this.dataQueue.isEmpty());
-        return new StreamsResultSet(batch);
-    }
-
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return null;
-    }
-
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return null;
-    }
-
-    @Override
-    public boolean isRunning() {
-        return !this.isComplete.get();
-    }
-
-    @Override
-    public void prepare(Object configurationObject) {
-        this.executor = new ThreadPoolExecutor(1, 4, 15L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
-        this.dataQueue = new LinkedBlockingQueue<>();
-        this.scheduler = getScheduler(this.dataQueue);
-        this.isComplete = new AtomicBoolean(false);
-        int consecutiveEmptyReads = 0;
-    }
-
-    @VisibleForTesting
-    protected RssFeedScheduler getScheduler(BlockingQueue<StreamsDatum> queue) {
-        if(this.perpetual)
-            return new RssFeedScheduler(this.executor, this.config.getFeeds(), queue);
-        else
-            return new RssFeedScheduler(this.executor, this.config.getFeeds(), queue, 0);
-    }
-
-    @Override
-    public void cleanUp() {
-        this.scheduler.stop();
-        ComponentUtils.shutdownExecutor(this.executor, 10, 10);
-    }
-
-    public static void main(String[] args) throws Exception {
-
-        Preconditions.checkArgument(args.length >= 2);
-
-        String configfile = args[0];
-        String outfile = args[1];
-
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config testResourceConfig = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
-
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        RssStreamConfiguration config = new ComponentConfigurator<>(RssStreamConfiguration.class).detectConfiguration(typesafe, "rss");
-        RssStreamProvider provider = new RssStreamProvider(config);
-
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            for (StreamsDatum datum : provider.readCurrent()) {
-                String json;
-                try {
-                    json = mapper.writeValueAsString(datum.getDocument());
-                    outStream.println(json);
-                } catch (JsonProcessingException e) {
-                    System.err.println(e.getMessage());
-                }
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
+      }
     }
+    while ( provider.isRunning());
+    provider.cleanUp();
+    outStream.flush();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssStreamProviderTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssStreamProviderTask.java b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssStreamProviderTask.java
index 3800a51..03a66d1 100644
--- a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssStreamProviderTask.java
+++ b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/RssStreamProviderTask.java
@@ -18,27 +18,23 @@
 
 package org.apache.streams.rss.provider;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.data.util.RFC3339Utils;
+import org.apache.streams.rss.serializer.SyndEntrySerializer;
+
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import com.sun.syndication.feed.synd.SyndEntry;
 import com.sun.syndication.feed.synd.SyndFeed;
 import com.sun.syndication.io.FeedException;
 import com.sun.syndication.io.SyndFeedInput;
-import com.sun.syndication.io.XmlReader;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.data.util.RFC3339Utils;
-import org.apache.streams.rss.FeedDetails;
-import org.apache.streams.rss.serializer.SyndEntrySerializer;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.io.InputStreamReader;
-import java.net.MalformedURLException;
 import java.net.URL;
 import java.net.URLConnection;
 import java.util.Collections;
@@ -50,15 +46,19 @@ import java.util.concurrent.ConcurrentHashMap;
 /**
  * A {@link java.lang.Runnable} task that queues rss feed data.
  *
+ * <p/>
  * <code>RssStreamProviderTask</code> reads the content of an rss feed and queues the articles from
  * the feed inform of a {@link com.fasterxml.jackson.databind.node.ObjectNode} wrapped in a {@link org.apache.streams.core.StreamsDatum}.
  * The task can filter articles by a published date.  If the task cannot parse the date of the article or the article does not contain a
  * published date, by default the task will attempt to queue article.
  *
- * A task can be run in perpetual mode which will store the article urls in a static variable.  The next time a <code>RssStreamProviderTask</code>
- * is run, it will not queue data that was seen the previous time the rss feed was read.  This is an attempt to reduce
- * multiple copies of an article from being out put by a {@link org.apache.streams.rss.provider.RssStreamProvider}.
+ * <p/>
+ * A task can be run in perpetual mode which will store the article urls in a static variable.  The next time a
+ * <code>RssStreamProviderTask</code> is run, it will not queue data that was seen the previous time the rss feed was read.
+ * This is an attempt to reduce multiple copies of an article from being output by a
+ * {@link org.apache.streams.rss.provider.RssStreamProvider}.
  *
+ * <p/>
  * ** Warning! **
  * It still is possible to output multiples of the same article.  If multiple tasks executions for the same rss feed overlap
  * in execution time, it possible that the previously seen articles static variable will not have been updated in time.
@@ -66,183 +66,187 @@ import java.util.concurrent.ConcurrentHashMap;
  */
 public class RssStreamProviderTask implements Runnable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(RssStreamProviderTask.class);
-    private static final int DEFAULT_TIME_OUT = 10000; // 10 seconds
-    private static final String RSS_KEY = "rssFeed";
-    private static final String URI_KEY = "uri";
-    private static final String LINK_KEY = "link";
-    private static final String DATE_KEY = "publishedDate";
-
-    /**
-     * Map that contains the Set of previously seen articles by an rss feed.
-     */
-    @VisibleForTesting
-    protected static final Map<String, Set<String>> PREVIOUSLY_SEEN = new ConcurrentHashMap<>();
-
-
-    private BlockingQueue<StreamsDatum> dataQueue;
-    private String rssFeed;
-    private int timeOut;
-    private SyndEntrySerializer serializer;
-    private DateTime publishedSince;
-    private boolean perpetual;
-
-
-    /**
-     * Non-perpetual mode, no date filter, time out of 10 sec
-     * @see {@link org.apache.streams.rss.provider.RssStreamProviderTask#RssStreamProviderTask(java.util.concurrent.BlockingQueue, String, org.joda.time.DateTime, int, boolean)}
-     * @param queue
-     * @param rssFeed
-     */
-    public RssStreamProviderTask(BlockingQueue<StreamsDatum> queue, String rssFeed) {
-        this(queue, rssFeed, new DateTime().minusYears(30), DEFAULT_TIME_OUT, false);
-    }
-
-    /**
-     * Non-perpetual mode, no date filter
-     * @see {@link org.apache.streams.rss.provider.RssStreamProviderTask#RssStreamProviderTask(java.util.concurrent.BlockingQueue, String, org.joda.time.DateTime, int, boolean)}
-     * @param queue
-     * @param rssFeed
-     * @param timeOut
-     */
-    public RssStreamProviderTask(BlockingQueue<StreamsDatum> queue, String rssFeed, int timeOut) {
-        this(queue, rssFeed, new DateTime().minusYears(30), timeOut, false);
-    }
-
-    /**
-     * Non-perpetual mode, time out of 10 sec
-     * @see {@link org.apache.streams.rss.provider.RssStreamProviderTask#RssStreamProviderTask(java.util.concurrent.BlockingQueue, String, org.joda.time.DateTime, int, boolean)}
-     * @param queue
-     * @param rssFeed
-     * @param publishedSince
-     */
-    public RssStreamProviderTask(BlockingQueue<StreamsDatum> queue, String rssFeed, DateTime publishedSince) {
-        this(queue, rssFeed, publishedSince, DEFAULT_TIME_OUT, false);
-    }
-
-    /**
-     * RssStreamProviderTask that reads an rss feed url and queues the resulting articles as StreamsDatums with the documents
-     * being object nodes.
-     * @param queue Queue to push data to
-     * @param rssFeed url of rss feed to read
-     * @param publishedSince DateTime to filter articles by, will queue articles with published times after this
-     * @param timeOut url connection timeout in milliseconds
-     * @param perpetual true, if you want to run in perpetual mode. NOT RECOMMENDED
-     */
-    public RssStreamProviderTask(BlockingQueue<StreamsDatum> queue, String rssFeed, DateTime publishedSince, int timeOut, boolean perpetual) {
-        this.dataQueue = queue;
-        this.rssFeed = rssFeed;
-        this.timeOut = timeOut;
-        this.publishedSince = publishedSince;
-        this.serializer = new SyndEntrySerializer();
-        this.perpetual = perpetual;
-    }
-
-    /**
-     * The rss feed url that this task is responsible for reading
-     * @return rss feed url
-     */
-    public String getRssFeed() {
-        return this.rssFeed;
-    }
-
-    @Override
-    public void run() {
-        try {
-            Set<String> batch = queueFeedEntries(new URL(this.rssFeed));
-            if(this.perpetual)
-                PREVIOUSLY_SEEN.put(this.getRssFeed(), batch);
-        } catch (IOException | FeedException e) {
-            LOGGER.warn("Exception while reading rss stream, {} : {}", this.rssFeed, e);
-        }
+  private static final Logger LOGGER = LoggerFactory.getLogger(RssStreamProviderTask.class);
+  private static final int DEFAULT_TIME_OUT = 10000; // 10 seconds
+  private static final String RSS_KEY = "rssFeed";
+  private static final String URI_KEY = "uri";
+  private static final String LINK_KEY = "link";
+  private static final String DATE_KEY = "publishedDate";
+
+  /**
+   * Map that contains the Set of previously seen articles by an rss feed.
+   */
+  @VisibleForTesting
+  protected static final Map<String, Set<String>> PREVIOUSLY_SEEN = new ConcurrentHashMap<>();
+
+
+  private BlockingQueue<StreamsDatum> dataQueue;
+  private String rssFeed;
+  private int timeOut;
+  private SyndEntrySerializer serializer;
+  private DateTime publishedSince;
+  private boolean perpetual;
+
+
+  /**
+   * Non-perpetual mode, no date filter, time out of 10 sec
+   * @see {@link org.apache.streams.rss.provider.RssStreamProviderTask
+   * #RssStreamProviderTask(java.util.concurrent.BlockingQueue, String, org.joda.time.DateTime, int, boolean)}
+   * @param queue queue
+   * @param rssFeed rssFeed
+   */
+  public RssStreamProviderTask(BlockingQueue<StreamsDatum> queue, String rssFeed) {
+    this(queue, rssFeed, new DateTime().minusYears(30), DEFAULT_TIME_OUT, false);
+  }
+
+  /**
+   * Non-perpetual mode, no date filter.
+   * @see {@link org.apache.streams.rss.provider.RssStreamProviderTask
+   * #RssStreamProviderTask(java.util.concurrent.BlockingQueue, String, org.joda.time.DateTime, int, boolean)}
+   * @param queue queue
+   * @param rssFeed rssFeed
+   * @param timeOut timeOut
+   */
+  public RssStreamProviderTask(BlockingQueue<StreamsDatum> queue, String rssFeed, int timeOut) {
+    this(queue, rssFeed, new DateTime().minusYears(30), timeOut, false);
+  }
+
+  /**
+   * Non-perpetual mode, time out of 10 sec
+   * @see {@link org.apache.streams.rss.provider.RssStreamProviderTask
+   * #RssStreamProviderTask(java.util.concurrent.BlockingQueue, String, org.joda.time.DateTime, int, boolean)}
+   * @param queue queue
+   * @param rssFeed rssFeed
+   * @param publishedSince publishedSince
+   */
+  public RssStreamProviderTask(BlockingQueue<StreamsDatum> queue, String rssFeed, DateTime publishedSince) {
+    this(queue, rssFeed, publishedSince, DEFAULT_TIME_OUT, false);
+  }
+
+  /**
+   * RssStreamProviderTask that reads an rss feed url and queues the resulting articles as StreamsDatums with the documents
+   * being object nodes.
+   * @param queue Queue to push data to
+   * @param rssFeed url of rss feed to read
+   * @param publishedSince DateTime to filter articles by, will queue articles with published times after this
+   * @param timeOut url connection timeout in milliseconds
+   * @param perpetual true, if you want to run in perpetual mode. NOT RECOMMENDED
+   */
+  public RssStreamProviderTask(BlockingQueue<StreamsDatum> queue, String rssFeed, DateTime publishedSince, int timeOut, boolean perpetual) {
+    this.dataQueue = queue;
+    this.rssFeed = rssFeed;
+    this.timeOut = timeOut;
+    this.publishedSince = publishedSince;
+    this.serializer = new SyndEntrySerializer();
+    this.perpetual = perpetual;
+  }
+
+  /**
+   * The rss feed url that this task is responsible for reading.
+   * @return rss feed url
+   */
+  public String getRssFeed() {
+    return this.rssFeed;
+  }
+
+  @Override
+  public void run() {
+    try {
+      Set<String> batch = queueFeedEntries(new URL(this.rssFeed));
+      if (this.perpetual) {
+        PREVIOUSLY_SEEN.put(this.getRssFeed(), batch);
+      }
+    } catch (IOException | FeedException ex) {
+      LOGGER.warn("Exception while reading rss stream, {} : {}", this.rssFeed, ex);
     }
-
-    /**
-     * Reads the url and queues the data
-     * @param feedUrl rss feed url
-     * @return set of all article urls that were read from the feed
-     * @throws IOException when it cannot connect to the url or the url is malformed
-     * @throws FeedException when it cannot reed the feed.
-     */
-    @VisibleForTesting
-    protected Set<String> queueFeedEntries(URL feedUrl) throws IOException, FeedException {
-
-        // ConcurrentHashSet is preferable, but it's only in guava 15+
-        // spark 1.5.0 uses guava 14 so for the moment this is the workaround
-        Set<String> batch = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
-        URLConnection connection = feedUrl.openConnection();
-        connection.setConnectTimeout(this.timeOut);
-        connection.setConnectTimeout(this.timeOut);
-        SyndFeedInput input = new SyndFeedInput();
-        SyndFeed feed = input.build(new InputStreamReader(connection.getInputStream()));
-        for (Object entryObj : feed.getEntries()) {
-            SyndEntry entry = (SyndEntry) entryObj;
-            ObjectNode nodeEntry = this.serializer.deserialize(entry);
-            nodeEntry.put(RSS_KEY, this.rssFeed);
-            String entryId = determineId(nodeEntry);
-            batch.add(entryId);
-            StreamsDatum datum = new StreamsDatum(nodeEntry);
-            try {
-                JsonNode published = nodeEntry.get(DATE_KEY);
-                if (published != null) {
-                    try {
-                        DateTime date = RFC3339Utils.parseToUTC(published.asText());
-                        if (date.isAfter(this.publishedSince) && (!this.perpetual || !seenBefore(entryId, this.rssFeed))) {
-                            this.dataQueue.put(datum);
-                            LOGGER.debug("Added entry, {}, to provider queue.", entryId);
-                        }
-                    } catch (InterruptedException ie) {
-                        Thread.currentThread().interrupt();
-                    } catch (Exception e) {
-                        LOGGER.trace("Failed to parse date from object node, attempting to add node to queue by default.");
-                        if(!this.perpetual || !seenBefore(entryId, this.rssFeed)) {
-                            this.dataQueue.put(datum);
-                            LOGGER.debug("Added entry, {}, to provider queue.", entryId);
-                        }
-                    }
-                } else {
-                    LOGGER.debug("No published date present, attempting to add node to queue by default.");
-                    if(!this.perpetual || !seenBefore(entryId, this.rssFeed)) {
-                        this.dataQueue.put(datum);
-                        LOGGER.debug("Added entry, {}, to provider queue.", entryId);
-                    }
-                }
-            } catch (InterruptedException ie) {
-                LOGGER.error("Interupted Exception.");
-                Thread.currentThread().interrupt();
+  }
+
+  /**
+   * Reads the url and queues the data
+   * @param feedUrl rss feed url
+   * @return set of all article urls that were read from the feed
+   * @throws IOException when it cannot connect to the url or the url is malformed
+   * @throws FeedException when it cannot read the feed.
+   */
+  @VisibleForTesting
+  protected Set<String> queueFeedEntries(URL feedUrl) throws IOException, FeedException {
+
+    // ConcurrentHashSet is preferable, but it's only in guava 15+
+    // spark 1.5.0 uses guava 14 so for the moment this is the workaround
+    Set<String> batch = Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
+    URLConnection connection = feedUrl.openConnection();
+    connection.setConnectTimeout(this.timeOut);
+    connection.setConnectTimeout(this.timeOut);
+    SyndFeedInput input = new SyndFeedInput();
+    SyndFeed feed = input.build(new InputStreamReader(connection.getInputStream()));
+    for (Object entryObj : feed.getEntries()) {
+      SyndEntry entry = (SyndEntry) entryObj;
+      ObjectNode nodeEntry = this.serializer.deserialize(entry);
+      nodeEntry.put(RSS_KEY, this.rssFeed);
+      String entryId = determineId(nodeEntry);
+      batch.add(entryId);
+      StreamsDatum datum = new StreamsDatum(nodeEntry);
+      try {
+        JsonNode published = nodeEntry.get(DATE_KEY);
+        if (published != null) {
+          try {
+            DateTime date = RFC3339Utils.parseToUTC(published.asText());
+            if (date.isAfter(this.publishedSince) && (!this.perpetual || !seenBefore(entryId, this.rssFeed))) {
+              this.dataQueue.put(datum);
+              LOGGER.debug("Added entry, {}, to provider queue.", entryId);
+            }
+          } catch (InterruptedException ie) {
+            Thread.currentThread().interrupt();
+          } catch (Exception ex) {
+            LOGGER.trace("Failed to parse date from object node, attempting to add node to queue by default.");
+            if (!this.perpetual || !seenBefore(entryId, this.rssFeed)) {
+              this.dataQueue.put(datum);
+              LOGGER.debug("Added entry, {}, to provider queue.", entryId);
             }
+          }
+        } else {
+          LOGGER.debug("No published date present, attempting to add node to queue by default.");
+          if (!this.perpetual || !seenBefore(entryId, this.rssFeed)) {
+            this.dataQueue.put(datum);
+            LOGGER.debug("Added entry, {}, to provider queue.", entryId);
+          }
         }
-        return batch;
+      } catch (InterruptedException ie) {
+        LOGGER.error("Interupted Exception.");
+        Thread.currentThread().interrupt();
+      }
     }
-
-    /**
-     * Returns a link to the article to use as the id
-     * @param node
-     * @return
-     */
-    private String determineId(ObjectNode node) {
-        String id = null;
-        if(node.get(URI_KEY) != null && !node.get(URI_KEY).textValue().equals("")) {
-            id = node.get(URI_KEY).textValue();
-        } else if(node.get(LINK_KEY) != null && !node.get(LINK_KEY).textValue().equals("")) {
-            id = node.get(LINK_KEY).textValue();
-        }
-        return id;
+    return batch;
+  }
+
+  /**
+   * Returns link to the article to use as the id.
+   * @param node node
+   * @return String
+   */
+  private String determineId(ObjectNode node) {
+    String id = null;
+    if (node.get(URI_KEY) != null && !node.get(URI_KEY).textValue().equals("")) {
+      id = node.get(URI_KEY).textValue();
+    } else if (node.get(LINK_KEY) != null && !node.get(LINK_KEY).textValue().equals("")) {
+      id = node.get(LINK_KEY).textValue();
     }
-
-    /**
-     * Returns false if the artile was previously seen in another task for this feed
-     * @param id
-     * @param rssFeed
-     * @return
-     */
-    private boolean seenBefore(String id, String rssFeed) {
-        Set<String> previousBatch = PREVIOUSLY_SEEN.get(rssFeed);
-        if(previousBatch == null) {
-            return false;
-        }
-        return previousBatch.contains(id);
+    return id;
+  }
+
+  /**
+   * Returns true if the article was previously seen in another task for this feed.
+   * @param id id
+   * @param rssFeed rssFeed
+   * @return boolean seenBefore
+   */
+  private boolean seenBefore(String id, String rssFeed) {
+    Set<String> previousBatch = PREVIOUSLY_SEEN.get(rssFeed);
+    if (previousBatch == null) {
+      return false;
     }
+    return previousBatch.contains(id);
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/perpetual/RssFeedScheduler.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/perpetual/RssFeedScheduler.java b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/perpetual/RssFeedScheduler.java
index 99ccbf3..e4bfd35 100644
--- a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/perpetual/RssFeedScheduler.java
+++ b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/provider/perpetual/RssFeedScheduler.java
@@ -15,12 +15,14 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.rss.provider.perpetual;
 
-import com.google.common.collect.Maps;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.rss.FeedDetails;
 import org.apache.streams.rss.provider.RssStreamProviderTask;
+
+import com.google.common.collect.Maps;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -31,82 +33,92 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 /**
- *
+ * RssFeedScheduler launches threads to collect data from rss feeds.
  */
 public class RssFeedScheduler implements Runnable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(RssFeedScheduler.class);
-    private static final int DEFAULT_PEROID = 10; // 1 minute
+  private static final Logger LOGGER = LoggerFactory.getLogger(RssFeedScheduler.class);
+  private static final int DEFAULT_PEROID = 10; // 10 minutes
 
-    private ExecutorService service;
-    private List<FeedDetails> feedDetailsList;
-    private int peroid;
-    private AtomicBoolean keepRunning;
-    private AtomicBoolean complete;
-    private Map<String, Long> lastScheduled;
-    private BlockingQueue<StreamsDatum> dataQueue;
+  private ExecutorService service;
+  private List<FeedDetails> feedDetailsList;
+  private int peroid;
+  private AtomicBoolean keepRunning;
+  private AtomicBoolean complete;
+  private Map<String, Long> lastScheduled;
+  private BlockingQueue<StreamsDatum> dataQueue;
 
-    public RssFeedScheduler(ExecutorService service, List<FeedDetails> feedDetailsList, BlockingQueue<StreamsDatum> dataQueue) {
-        this(service, feedDetailsList, dataQueue,  DEFAULT_PEROID);
-    }
+  public RssFeedScheduler(ExecutorService service, List<FeedDetails> feedDetailsList, BlockingQueue<StreamsDatum> dataQueue) {
+    this(service, feedDetailsList, dataQueue,  DEFAULT_PEROID);
+  }
 
-    public RssFeedScheduler(ExecutorService service, List<FeedDetails> feedDetailsList, BlockingQueue<StreamsDatum> dataQueue, int peroid) {
-        this.service = service;
-        this.feedDetailsList = feedDetailsList;
-        this.peroid = peroid;
-        this.keepRunning = new AtomicBoolean(true);
-        this.lastScheduled = Maps.newHashMap();
-        this.dataQueue = dataQueue;
-        this.complete = new AtomicBoolean(false);
-    }
+  /**
+   * RssFeedScheduler constructor.
+   * @param service service
+   * @param feedDetailsList feedDetailsList
+   * @param dataQueue dataQueue
+   * @param peroid peroid
+   */
+  public RssFeedScheduler(ExecutorService service, List<FeedDetails> feedDetailsList, BlockingQueue<StreamsDatum> dataQueue, int peroid) {
+    this.service = service;
+    this.feedDetailsList = feedDetailsList;
+    this.peroid = peroid;
+    this.keepRunning = new AtomicBoolean(true);
+    this.lastScheduled = Maps.newHashMap();
+    this.dataQueue = dataQueue;
+    this.complete = new AtomicBoolean(false);
+  }
 
-    public void stop() {
-        this.keepRunning.set(false);
-    }
+  public void stop() {
+    this.keepRunning.set(false);
+  }
 
-    public boolean isComplete() {
-        return this.complete.get();
-    }
+  public boolean isComplete() {
+    return this.complete.get();
+  }
 
-    @Override
-    public void run() {
-        this.complete.set(false);
-        try {
-            if(this.peroid <= 0) {
-                scheduleFeeds();
-            } else {
-                while (this.keepRunning.get()) {
-                    scheduleFeeds();
-                    Thread.sleep(this.peroid * 60000);
-                }
-            }
-        } catch (InterruptedException ie) {
-            Thread.currentThread().interrupt();
-        } finally {
-            this.service = null;
-            LOGGER.info("{} completed scheduling of feeds.", this.getClass().getName());
-            this.complete.set(true);
+  @Override
+  public void run() {
+    this.complete.set(false);
+    try {
+      if (this.peroid <= 0) {
+        scheduleFeeds();
+      } else {
+        while (this.keepRunning.get()) {
+          scheduleFeeds();
+          Thread.sleep(this.peroid * 60000);
         }
+      }
+    } catch (InterruptedException ie) {
+      Thread.currentThread().interrupt();
+    } finally {
+      this.service = null;
+      LOGGER.info("{} completed scheduling of feeds.", this.getClass().getName());
+      this.complete.set(true);
     }
+  }
 
-    public void scheduleFeeds() {
-        for(FeedDetails detail : this.feedDetailsList) {
-            Long lastTime = null;
-            if((lastTime = this.lastScheduled.get(detail.getUrl())) == null) {
-                lastTime = 0L;
-            }
-            long currentTime = System.currentTimeMillis();
-            long pollInterval;
-            if(detail.getPollIntervalMillis() == null) {
-                pollInterval = 0;
-            } else {
-                pollInterval = detail.getPollIntervalMillis();
-            }
-            if(currentTime - lastTime > pollInterval) {
-                this.service.execute(new RssStreamProviderTask(this.dataQueue, detail.getUrl()));
-                this.LOGGER.trace("Scheduled data collection on rss feed, {}", detail.getUrl());
-                this.lastScheduled.put(detail.getUrl(), currentTime);
-            }
-        }
+  /**
+   * Schedule Feeds.
+   */
+  public void scheduleFeeds() {
+    for (FeedDetails detail : this.feedDetailsList) {
+      Long lastTime = null;
+      if ((lastTime = this.lastScheduled.get(detail.getUrl())) == null) {
+        lastTime = 0L;
+      }
+      long currentTime = System.currentTimeMillis();
+      long pollInterval;
+      if (detail.getPollIntervalMillis() == null) {
+        pollInterval = 0;
+      } else {
+        pollInterval = detail.getPollIntervalMillis();
+      }
+      if (currentTime - lastTime > pollInterval) {
+        this.service.execute(new RssStreamProviderTask(this.dataQueue, detail.getUrl()));
+        this.LOGGER.trace("Scheduled data collection on rss feed, {}", detail.getUrl());
+        this.lastScheduled.put(detail.getUrl(), currentTime);
+      }
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/serializer/SyndEntryActivitySerializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/serializer/SyndEntryActivitySerializer.java b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/serializer/SyndEntryActivitySerializer.java
index e323f27..1e3aedd 100644
--- a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/serializer/SyndEntryActivitySerializer.java
+++ b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/serializer/SyndEntryActivitySerializer.java
@@ -18,11 +18,6 @@
 
 package org.apache.streams.rss.serializer;
 
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.JsonNodeFactory;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Preconditions;
 import org.apache.streams.data.ActivitySerializer;
 import org.apache.streams.data.util.RFC3339Utils;
 import org.apache.streams.jackson.StreamsJacksonMapper;
@@ -30,6 +25,12 @@ import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Author;
 import org.apache.streams.pojo.json.Provider;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Preconditions;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.slf4j.Logger;
@@ -40,194 +41,200 @@ import java.util.List;
 
 public class SyndEntryActivitySerializer implements ActivitySerializer<ObjectNode> {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(SyndEntryActivitySerializer.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(SyndEntryActivitySerializer.class);
 
-    private boolean includeRomeExtension;
+  private boolean includeRomeExtension;
 
-    public SyndEntryActivitySerializer() {
-        this(true);
-    }
-
-    public SyndEntryActivitySerializer(boolean includeRomeExtension) {
-        this.includeRomeExtension = includeRomeExtension;
-    }
+  public SyndEntryActivitySerializer() {
+    this(true);
+  }
 
+  public SyndEntryActivitySerializer(boolean includeRomeExtension) {
+    this.includeRomeExtension = includeRomeExtension;
+  }
 
-    @Override
-    public List<Activity> deserializeAll(List<ObjectNode> objectNodes) {
-        List<Activity> result = new LinkedList<>();
-        for (ObjectNode node : objectNodes) {
-            result.add(deserialize(node));
-        }
-        return result;
+  @Override
+  public List<Activity> deserializeAll(List<ObjectNode> objectNodes) {
+    List<Activity> result = new LinkedList<>();
+    for (ObjectNode node : objectNodes) {
+      result.add(deserialize(node));
     }
-
-    @Override
-    public String serializationFormat() {
-        return "application/streams-provider-rss";
+    return result;
+  }
+
+  @Override
+  public String serializationFormat() {
+    return "application/streams-provider-rss";
+  }
+
+  @Override
+  public ObjectNode serialize(Activity deserialized) {
+    throw new UnsupportedOperationException("Cannot currently serialize to Rome");
+  }
+
+  @Override
+  public Activity deserialize(ObjectNode syndEntry) {
+    return deserializeWithRomeExtension(syndEntry, this.includeRomeExtension);
+  }
+
+  /**
+   * deserializeWithRomeExtension ObjectNode entry withExtension.
+   * @param entry ObjectNode
+   * @param withExtension whether to add Rome Extension
+   * @return Activity
+   */
+  public Activity deserializeWithRomeExtension(ObjectNode entry, boolean withExtension) {
+    Preconditions.checkNotNull(entry);
+
+    Activity activity = new Activity();
+    Provider provider = buildProvider(entry);
+    ActivityObject actor = buildActor(entry);
+    ActivityObject activityObject = buildActivityObject(entry);
+
+    activityObject.setUrl(provider.getUrl());
+    activityObject.setAuthor(actor.getAuthor());
+
+    activity.setUrl(provider.getUrl());
+    activity.setProvider(provider);
+    activity.setActor(actor);
+    activity.setVerb("post");
+    activity.setId("id:rss:post:" + activity.getUrl());
+
+    JsonNode published = entry.get("publishedDate");
+    if (published != null) {
+      try {
+        activity.setPublished(RFC3339Utils.parseToUTC(published.textValue()));
+      } catch (Exception ex) {
+        LOGGER.warn("Failed to parse date : {}", published.textValue());
+
+        DateTime now = DateTime.now().withZone(DateTimeZone.UTC);
+        activity.setPublished(now);
+      }
     }
 
-    @Override
-    public ObjectNode serialize(Activity deserialized) {
-        throw new UnsupportedOperationException("Cannot currently serialize to Rome");
-    }
+    activity.setUpdated(activityObject.getUpdated());
+    activity.setObject(activityObject);
 
-    @Override
-    public Activity deserialize(ObjectNode syndEntry) {
-        return deserializeWithRomeExtension(syndEntry, this.includeRomeExtension);
+    if (withExtension) {
+      activity = addRomeExtension(activity, entry);
     }
 
-    public Activity deserializeWithRomeExtension(ObjectNode entry, boolean withExtension) {
-        Preconditions.checkNotNull(entry);
-
-        Activity activity = new Activity();
-        Provider provider = buildProvider(entry);
-        ActivityObject actor = buildActor(entry);
-        ActivityObject activityObject = buildActivityObject(entry);
-
-        activityObject.setUrl(provider.getUrl());
-        activityObject.setAuthor(actor.getAuthor());
-
-        activity.setUrl(provider.getUrl());
-        activity.setProvider(provider);
-        activity.setActor(actor);
-        activity.setVerb("post");
-        activity.setId("id:rss:post:" + activity.getUrl());
-
-        JsonNode published = entry.get("publishedDate");
-        if (published != null) {
-            try {
-                activity.setPublished(RFC3339Utils.parseToUTC(published.textValue()));
-            } catch (Exception e) {
-                LOGGER.warn("Failed to parse date : {}", published.textValue());
-
-                DateTime now = DateTime.now().withZone(DateTimeZone.UTC);
-                activity.setPublished(now);
-            }
-        }
-
-        activity.setUpdated(activityObject.getUpdated());
-        activity.setObject(activityObject);
-
-        if (withExtension) {
-            activity = addRomeExtension(activity, entry);
-        }
-
-        return activity;
+    return activity;
+  }
+
+  /**
+   * Given an RSS entry, extract the author and actor information and return it
+   * in an actor object
+   *
+   * @param entry entry
+   * @return $.actor
+   */
+  private ActivityObject buildActor(ObjectNode entry) {
+    ActivityObject actor = new ActivityObject();
+    Author author = new Author();
+
+    if (entry.get("author") != null) {
+      author.setId(entry.get("author").textValue());
+      author.setDisplayName(entry.get("author").textValue());
+
+      actor.setAuthor(author);
+      String uriToSet = entry.get("rssFeed") != null ? entry.get("rssFeed").asText() : null;
+
+      actor.setId("id:rss:" + uriToSet + ":" + author.getId());
+      actor.setDisplayName(author.getDisplayName());
     }
 
-    /**
-     * Given an RSS entry, extra out the author and actor information and return it
-     * in an actor object
-     *
-     * @param entry
-     * @return
-     */
-    private ActivityObject buildActor(ObjectNode entry) {
-        ActivityObject actor = new ActivityObject();
-        Author author = new Author();
-
-        if (entry.get("author") != null) {
-            author.setId(entry.get("author").textValue());
-            author.setDisplayName(entry.get("author").textValue());
-
-            actor.setAuthor(author);
-            String uriToSet = entry.get("rssFeed") != null ? entry.get("rssFeed").asText() : null;
-
-            actor.setId("id:rss:" + uriToSet + ":" + author.getId());
-            actor.setDisplayName(author.getDisplayName());
-        }
-
-        return actor;
+    return actor;
+  }
+
+  /**
+   * Given an RSS object, build the ActivityObject.
+   *
+   * @param entry ObjectNode
+   * @return $.object
+   */
+  private ActivityObject buildActivityObject(ObjectNode entry) {
+    ActivityObject activityObject = new ActivityObject();
+
+    JsonNode summary = entry.get("description");
+    if (summary != null) {
+      activityObject.setSummary(summary.textValue());
+    } else if ((summary = entry.get("title")) != null) {
+      activityObject.setSummary(summary.textValue());
     }
 
-    /**
-     * Given an RSS object, build the ActivityObject
-     *
-     * @param entry
-     * @return
-     */
-    private ActivityObject buildActivityObject(ObjectNode entry) {
-        ActivityObject activityObject = new ActivityObject();
+    return activityObject;
+  }
 
-        JsonNode summary = entry.get("description");
-        if (summary != null)
-            activityObject.setSummary(summary.textValue());
-        else if((summary = entry.get("title")) != null) {
-            activityObject.setSummary(summary.textValue());
-        }
+  /**
+   * Given an RSS object, build and return the Provider object.
+   *
+   * @param entry ObjectNode
+   * @return $.provider
+   */
+  private Provider buildProvider(ObjectNode entry) {
+    Provider provider = new Provider();
 
-        return activityObject;
-    }
+    String link = null;
+    String uri = null;
+    String resourceLocation = null;
 
-    /**
-     * Given an RSS object, build and return the Provider object
-     *
-     * @param entry
-     * @return
-     */
-    private Provider buildProvider(ObjectNode entry) {
-        Provider provider = new Provider();
-
-        String link = null;
-        String uri = null;
-        String resourceLocation = null;
-
-        if (entry.get("link") != null)
-            link = entry.get("link").textValue();
-        if (entry.get("uri") != null)
-            uri = entry.get("uri").textValue();
-
-        /*
-         * Order of precedence for resourceLocation selection
-         *
-         * 1. Valid URI
-         * 2. Valid Link
-         * 3. Non-null URI
-         * 4. Non-null Link
-         */
-        if(isValidResource(uri))
-            resourceLocation = uri;
-        else if(isValidResource(link))
-            resourceLocation = link;
-        else if(uri != null || link != null) {
-            resourceLocation = (uri != null) ? uri : link;
-        }
-
-        provider.setId("id:providers:rss");
-        provider.setUrl(resourceLocation);
-        provider.setDisplayName("RSS");
-
-        return provider;
+    if (entry.get("link") != null) {
+      link = entry.get("link").textValue();
     }
-
-    /**
-     * Tests whether or not the passed in resource is a valid URI
-     * @param resource
-     * @return boolean of whether or not the resource is valid
-     */
-    private boolean isValidResource(String resource) {
-        return resource != null && (resource.startsWith("http") || resource.startsWith("www"));
+    if (entry.get("uri") != null) {
+      uri = entry.get("uri").textValue();
     }
-
-    /**
-     * Given an RSS object and an existing activity,
-     * add the Rome extension to that activity and return it
+    /*
+     * Order of precedence for resourceLocation selection
      *
-     * @param activity
-     * @param entry
-     * @return
+     * 1. Valid URI
+     * 2. Valid Link
+     * 3. Non-null URI
+     * 4. Non-null Link
      */
-    private Activity addRomeExtension(Activity activity, ObjectNode entry) {
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-        ObjectNode activityRoot = mapper.convertValue(activity, ObjectNode.class);
-        ObjectNode extensions = JsonNodeFactory.instance.objectNode();
-
-        extensions.put("rome", entry);
-        activityRoot.put("extensions", extensions);
-
-        activity = mapper.convertValue(activityRoot, Activity.class);
-
-        return activity;
+    if (isValidResource(uri)) {
+      resourceLocation = uri;
+    } else if (isValidResource(link)) {
+      resourceLocation = link;
+    } else if (uri != null || link != null) {
+      resourceLocation = (uri != null) ? uri : link;
     }
+
+    provider.setId("id:providers:rss");
+    provider.setUrl(resourceLocation);
+    provider.setDisplayName("RSS");
+
+    return provider;
+  }
+
+  /**
+   * Tests whether or not the passed in resource is a valid URI.
+   * @param resource resource
+   * @return boolean of whether or not the resource is valid
+   */
+  private boolean isValidResource(String resource) {
+    return resource != null && (resource.startsWith("http") || resource.startsWith("www"));
+  }
+
+  /**
+   * Given an RSS object and an existing activity,
+   * add the Rome extension to that activity and return it.
+   *
+   * @param activity Activity
+   * @param entry ObjectNode
+   * @return Activity
+   */
+  private Activity addRomeExtension(Activity activity, ObjectNode entry) {
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+    ObjectNode activityRoot = mapper.convertValue(activity, ObjectNode.class);
+    ObjectNode extensions = JsonNodeFactory.instance.objectNode();
+
+    extensions.put("rome", entry);
+    activityRoot.put("extensions", extensions);
+
+    activity = mapper.convertValue(activityRoot, Activity.class);
+
+    return activity;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/serializer/SyndEntrySerializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/serializer/SyndEntrySerializer.java b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/serializer/SyndEntrySerializer.java
index 1135172..6868bfc 100644
--- a/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/serializer/SyndEntrySerializer.java
+++ b/streams-contrib/streams-provider-rss/src/main/java/org/apache/streams/rss/serializer/SyndEntrySerializer.java
@@ -26,7 +26,12 @@ import com.sun.syndication.feed.module.Module;
 import com.sun.syndication.feed.rss.Category;
 import com.sun.syndication.feed.rss.Content;
 import com.sun.syndication.feed.rss.Enclosure;
-import com.sun.syndication.feed.synd.*;
+import com.sun.syndication.feed.synd.SyndContent;
+import com.sun.syndication.feed.synd.SyndEnclosure;
+import com.sun.syndication.feed.synd.SyndEntry;
+import com.sun.syndication.feed.synd.SyndFeed;
+import com.sun.syndication.feed.synd.SyndImage;
+import com.sun.syndication.feed.synd.SyndLinkImpl;
 import org.joda.time.format.DateTimeFormatter;
 import org.joda.time.format.ISODateTimeFormat;
 import org.slf4j.Logger;
@@ -42,267 +47,284 @@ import java.util.List;
  */
 public class SyndEntrySerializer {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(SyndEntrySerializer.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(SyndEntrySerializer.class);
 
-    public ObjectNode deserialize(SyndEntry entry) {
-        return deserializeRomeEntry(entry);
-    }
-
-
-    public List<ObjectNode> deserializeAll(Collection<SyndEntry> entries) {
-        List<ObjectNode> result = Lists.newLinkedList();
-        for(SyndEntry entry : entries) {
-            result.add(deserialize(entry));
-        }
-        return result;
-    }
+  public ObjectNode deserialize(SyndEntry entry) {
+    return deserializeRomeEntry(entry);
+  }
 
+  private ObjectNode deserializeRomeEntry(SyndEntry entry) {
+    JsonNodeFactory factory = JsonNodeFactory.instance;
+    ObjectNode root = factory.objectNode();
 
+    serializeString(entry.getAuthor(), "author", root);
+    serializeListOfStrings(entry.getAuthors(), "authors", root, factory);
+    serializeCategories(root, factory, entry.getCategories());
+    serializeContents(root, factory, entry.getContents());
+    serializeListOfStrings(entry.getContributors(), "contributors", root, factory);
+    serializeDescription(root, factory, entry.getDescription());
+    serializeEnclosures(root, factory, entry.getEnclosures());
+    serializeForeignMarkUp(root, factory, entry.getForeignMarkup());
+    serializeString(entry.getLink(), "link", root);
+    serializeLinks(root, factory, entry.getLinks());
+    serializeModules(root, factory, entry.getModules());
+    serializeDate(root, entry.getPublishedDate(), "publishedDate");
+    serializeSource(root, factory, entry.getSource());
+    serializeString(entry.getTitle(), "title", root);
+    serializeDate(root, entry.getUpdatedDate(), "updateDate");
+    serializeString(entry.getUri(), "uri", root);
 
-    private ObjectNode deserializeRomeEntry(SyndEntry entry) {
-        JsonNodeFactory factory = JsonNodeFactory.instance;
-        ObjectNode root = factory.objectNode();
+    return root;
+  }
 
-        serializeString(entry.getAuthor(), "author", root);
-        serializeListOfStrings(entry.getAuthors(), "authors", root, factory);
-        serializeCategories(root, factory, entry.getCategories());
-        serializeContents(root, factory, entry.getContents());
-        serializeListOfStrings(entry.getContributors(), "contributors", root, factory);
-        serializeDescription(root, factory, entry.getDescription());
-        serializeEnclosures(root, factory, entry.getEnclosures());
-        serializeForeignMarkUp(root, factory, entry.getForeignMarkup());
-        serializeString(entry.getLink(), "link", root);
-        serializeLinks(root, factory, entry.getLinks());
-        serializeModules(root, factory, entry.getModules());
-        serializeDate(root, entry.getPublishedDate(), "publishedDate");
-        serializeSource(root, factory, entry.getSource());
-        serializeString(entry.getTitle(), "title", root);
-        serializeDate(root, entry.getUpdatedDate(), "updateDate");
-        serializeString(entry.getUri(), "uri", root);
 
-        return root;
+  private void serializeCategories(ObjectNode root, JsonNodeFactory factory, List categories) {
+    if (categories == null || categories.size() == 0) {
+      return;
     }
-
-
-    private void serializeCategories(ObjectNode root, JsonNodeFactory factory, List categories) {
-        if(categories == null || categories.size() == 0)
-            return;
-        ArrayNode cats = factory.arrayNode();
-        for(Object obj : categories) {
-            if(obj instanceof Category) {
-                ObjectNode catNode = factory.objectNode();
-                Category category = (Category) obj;
-                if(category.getDomain() != null)
-                    catNode.put("domain", category.getDomain());
-                if(category.getValue() != null)
-                    catNode.put("value", category.getValue());
-                cats.add(catNode);
-            }
-            else if(obj instanceof com.sun.syndication.feed.atom.Category) {
-                com.sun.syndication.feed.atom.Category category = (com.sun.syndication.feed.atom.Category) obj;
-                ObjectNode catNode = factory.objectNode();
-                if(category.getLabel() != null)
-                    catNode.put("label", category.getLabel());
-                if(category.getScheme() != null)
-                    catNode.put("scheme", category.getScheme());
-                if(category.getSchemeResolved() != null)
-                    catNode.put("schemeResolved", category.getSchemeResolved());
-                if(category.getTerm() != null )
-                    catNode.put("term", category.getTerm());
-                cats.add(catNode);
-            }
+    ArrayNode cats = factory.arrayNode();
+    for (Object obj : categories) {
+      if (obj instanceof Category) {
+        ObjectNode catNode = factory.objectNode();
+        Category category = (Category) obj;
+        if (category.getDomain() != null) {
+          catNode.put("domain", category.getDomain());
+        }
+        if (category.getValue() != null) {
+          catNode.put("value", category.getValue());
         }
-        root.put("categories", cats);
+        cats.add(catNode);
+      } else if (obj instanceof com.sun.syndication.feed.atom.Category) {
+        com.sun.syndication.feed.atom.Category category = (com.sun.syndication.feed.atom.Category) obj;
+        ObjectNode catNode = factory.objectNode();
+        if (category.getLabel() != null) {
+          catNode.put("label", category.getLabel());
+        }
+        if (category.getScheme() != null) {
+          catNode.put("scheme", category.getScheme());
+        }
+        if (category.getSchemeResolved() != null) {
+          catNode.put("schemeResolved", category.getSchemeResolved());
+        }
+        if (category.getTerm() != null ) {
+          catNode.put("term", category.getTerm());
+        }
+        cats.add(catNode);
+      }
     }
+    root.put("categories", cats);
+  }
 
-    private void serializeContents(ObjectNode root, JsonNodeFactory factory, List contents) {
-        if(contents == null || contents.size() == 0)
-            return;
-        ArrayNode contentsArray = factory.arrayNode();
-        for(Object obj : contents) {
-            ObjectNode content = factory.objectNode();
-            if(obj instanceof Content) {
-                Content rssContent = (Content) obj;
-                content.put("type", rssContent.getType());
-                content.put("value", rssContent.getValue());
-            }
-            if(obj instanceof com.sun.syndication.feed.atom.Content) {
-                com.sun.syndication.feed.atom.Content atomContent = (com.sun.syndication.feed.atom.Content) obj;
-                content.put("type", atomContent.getType());
-                content.put("value", atomContent.getValue());
-                content.put("mode", atomContent.getMode());
-                content.put("src", atomContent.getSrc());
-            }
-            contentsArray.add(content);
-        }
-        root.put("contents", contentsArray);
+  private void serializeContents(ObjectNode root, JsonNodeFactory factory, List contents) {
+    if (contents == null || contents.size() == 0) {
+      return;
+    }
+    ArrayNode contentsArray = factory.arrayNode();
+    for (Object obj : contents) {
+      ObjectNode content = factory.objectNode();
+      if (obj instanceof Content) {
+        Content rssContent = (Content) obj;
+        content.put("type", rssContent.getType());
+        content.put("value", rssContent.getValue());
+      }
+      if (obj instanceof com.sun.syndication.feed.atom.Content) {
+        com.sun.syndication.feed.atom.Content atomContent = (com.sun.syndication.feed.atom.Content) obj;
+        content.put("type", atomContent.getType());
+        content.put("value", atomContent.getValue());
+        content.put("mode", atomContent.getMode());
+        content.put("src", atomContent.getSrc());
+      }
+      contentsArray.add(content);
     }
+    root.put("contents", contentsArray);
+  }
 
-    private void serializeDate(ObjectNode root, Date date, String key) {
-        DateTimeFormatter formatter = ISODateTimeFormat.dateTime();
-        if(date == null)
-            return;
-        root.put(key, formatter.print(date.getTime()));
+  private void serializeDate(ObjectNode root, Date date, String key) {
+    DateTimeFormatter formatter = ISODateTimeFormat.dateTime();
+    if (date == null) {
+      return;
     }
+    root.put(key, formatter.print(date.getTime()));
+  }
 
-    private void serializeDescription(ObjectNode root, JsonNodeFactory factory, SyndContent synd) {
-        if(synd == null)
-            return;
-        ObjectNode content = factory.objectNode();
-        if(synd.getValue() != null)
-            content.put("value", synd.getValue());
-        if(synd.getMode() != null)
-            content.put("mode", synd.getMode());
-        if(synd.getType() != null)
-            content.put("type", synd.getType());
-        root.put("description", content);
+  private void serializeDescription(ObjectNode root, JsonNodeFactory factory, SyndContent synd) {
+    if (synd == null) {
+      return;
+    }
+    ObjectNode content = factory.objectNode();
+    if (synd.getValue() != null) {
+      content.put("value", synd.getValue());
     }
+    if (synd.getMode() != null) {
+      content.put("mode", synd.getMode());
+    }
+    if (synd.getType() != null) {
+      content.put("type", synd.getType());
+    }
+    root.put("description", content);
+  }
 
-    private void serializeEnclosures(ObjectNode root, JsonNodeFactory factory, List enclosures) {
-        if(enclosures == null || enclosures.size() == 0)
-            return;
-        ArrayNode encls = factory.arrayNode();
-        for(Object obj : enclosures) {
-            if(obj instanceof Enclosure){
-                Enclosure enclosure = (Enclosure) obj;
-                ObjectNode encl = factory.objectNode();
-                if(enclosure.getType() != null)
-                    encl.put("type", enclosure.getType());
-                if(enclosure.getUrl() != null)
-                    encl.put("url", enclosure.getUrl());
-                encl.put("length", enclosure.getLength());
-                encls.add(encl);
-            } else if(obj instanceof SyndEnclosure) {
-                SyndEnclosure enclosure = (SyndEnclosure) obj;
-                ObjectNode encl = factory.objectNode();
-                if(enclosure.getType() != null)
-                    encl.put("type", enclosure.getType());
-                if(enclosure.getUrl() != null)
-                    encl.put("url", enclosure.getUrl());
-                encl.put("length", enclosure.getLength());
-                encls.add(encl);
-            } else {
-                LOGGER.warn("serializeEnclosures does not handle type : {}", obj.getClass().toString());
-            }
+  private void serializeEnclosures(ObjectNode root, JsonNodeFactory factory, List enclosures) {
+    if (enclosures == null || enclosures.size() == 0) {
+      return;
+    }
+    ArrayNode encls = factory.arrayNode();
+    for (Object obj : enclosures) {
+      if (obj instanceof Enclosure) {
+        Enclosure enclosure = (Enclosure) obj;
+        ObjectNode encl = factory.objectNode();
+        if (enclosure.getType() != null) {
+          encl.put("type", enclosure.getType());
+        }
+        if (enclosure.getUrl() != null) {
+          encl.put("url", enclosure.getUrl());
         }
-        root.put("enclosures", encls);
+        encl.put("length", enclosure.getLength());
+        encls.add(encl);
+      } else if (obj instanceof SyndEnclosure) {
+        SyndEnclosure enclosure = (SyndEnclosure) obj;
+        ObjectNode encl = factory.objectNode();
+        if (enclosure.getType() != null) {
+          encl.put("type", enclosure.getType());
+        }
+        if (enclosure.getUrl() != null) {
+          encl.put("url", enclosure.getUrl());
+        }
+        encl.put("length", enclosure.getLength());
+        encls.add(encl);
+      } else {
+        LOGGER.warn("serializeEnclosures does not handle type : {}", obj.getClass().toString());
+      }
     }
+    root.put("enclosures", encls);
+  }
 
-    private void serializeForeignMarkUp(ObjectNode root, JsonNodeFactory factory, Object foreignMarkUp) {
-        if(foreignMarkUp == null)
-            return;
-        if(foreignMarkUp instanceof String) {
-            root.put("foreignEnclosures", (String) foreignMarkUp);
-        } else if (foreignMarkUp instanceof List) {
-            List foreignList = (List) foreignMarkUp;
-            if(foreignList.size() == 0)
-                return;
-            if(foreignList.get(0) instanceof String) {
-                serializeListOfStrings(foreignList, "foreignEnclosures", root, factory);
-            } else {
-                LOGGER.debug("SyndEntry.getForeignMarkUp is not of type String. Need to handle the case of class : {}", ((List)foreignMarkUp).get(0).getClass().toString());
-            }
-        } else {
-            LOGGER.debug("SyndEntry.getForeignMarkUp is not of an expected type. Need to handle the case of class : {}", foreignMarkUp.getClass().toString());
-        }
+  private void serializeForeignMarkUp(ObjectNode root, JsonNodeFactory factory, Object foreignMarkUp) {
+    if (foreignMarkUp == null) {
+      return;
     }
+    if (foreignMarkUp instanceof String) {
+      root.put("foreignEnclosures", (String) foreignMarkUp);
+    } else if (foreignMarkUp instanceof List) {
+      List foreignList = (List) foreignMarkUp;
+      if (foreignList.size() == 0) {
+        return;
+      }
+      if (foreignList.get(0) instanceof String) {
+        serializeListOfStrings(foreignList, "foreignEnclosures", root, factory);
+      } else {
+        LOGGER.debug("SyndEntry.getForeignMarkUp is not of type String. Need to handle the case of class : {}",
+            ((List)foreignMarkUp).get(0).getClass().toString());
+      }
+    } else {
+      LOGGER.debug("SyndEntry.getForeignMarkUp is not of an expected type. Need to handle the case of class : {}",
+          foreignMarkUp.getClass().toString());
+    }
+  }
 
-    private void serializeImage(ObjectNode root, JsonNodeFactory factory, SyndImage image) {
-        if(image == null)
-            return;
-        ObjectNode imageNode = factory.objectNode();
-        serializeString(image.getDescription(), "description", imageNode);
-        serializeString(image.getLink(), "link", imageNode);
-        serializeString(image.getUrl(), "url", imageNode);
-        serializeString(image.getTitle(), "title", imageNode);
-        root.put("image", imageNode);
+  private void serializeImage(ObjectNode root, JsonNodeFactory factory, SyndImage image) {
+    if (image == null) {
+      return;
     }
+    ObjectNode imageNode = factory.objectNode();
+    serializeString(image.getDescription(), "description", imageNode);
+    serializeString(image.getLink(), "link", imageNode);
+    serializeString(image.getUrl(), "url", imageNode);
+    serializeString(image.getTitle(), "title", imageNode);
+    root.put("image", imageNode);
+  }
 
-    private void serializeListOfStrings(List toSerialize, String key, ObjectNode node, JsonNodeFactory factory) {
-        if(toSerialize == null || toSerialize.size() == 0)
-            return;
-        ArrayNode keyNode = factory.arrayNode();
-        for(Object obj : toSerialize) {
-            if(obj instanceof String) {
-                keyNode.add((String) obj);
-            } else {
-                LOGGER.debug("Array at Key:{} was expecting item types of String. Received class : {}", key, obj.getClass().toString());
-            }
-        }
-        node.put(key, keyNode);
+  private void serializeListOfStrings(List toSerialize, String key, ObjectNode node, JsonNodeFactory factory) {
+    if (toSerialize == null || toSerialize.size() == 0) {
+      return;
     }
+    ArrayNode keyNode = factory.arrayNode();
+    for (Object obj : toSerialize) {
+      if (obj instanceof String) {
+        keyNode.add((String) obj);
+      } else {
+        LOGGER.debug("Array at Key:{} was expecting item types of String. Received class : {}", key, obj.getClass().toString());
+      }
+    }
+    node.put(key, keyNode);
+  }
 
-    private void serializeLinks(ObjectNode root, JsonNodeFactory factory, List links) {
-        if(links == null || links.size() == 0) {
-            return;
-        } else if(links.get(0) instanceof String) {
-            serializeListOfStrings(links, "links", root, factory);
-        } else if(links.get(0) instanceof SyndLinkImpl) {
-            ArrayNode linksArray = factory.arrayNode();
-            SyndLinkImpl syndLink;
-            ObjectNode linkNode;
-            for(Object obj : links) {
-                linkNode = factory.objectNode();
-                syndLink = (SyndLinkImpl) obj;
-                linkNode.put("rel", syndLink.getRel());
-                linkNode.put("href", syndLink.getHref());
-                linkNode.put("type", syndLink.getType());
-                linkNode.put("length", syndLink.getLength());
-                linkNode.put("hrefLang", syndLink.getHreflang());
-                linkNode.put("title", syndLink.getTitle());
-                linksArray.add(linkNode);
-            }
-            root.put("links", linksArray);
-        } else {
-            LOGGER.error("No implementation for handling links of class : {}", links.get(0).getClass().toString());
-        }
+  private void serializeLinks(ObjectNode root, JsonNodeFactory factory, List links) {
+    if (links == null || links.size() == 0) {
+      return;
+    } else if (links.get(0) instanceof String) {
+      serializeListOfStrings(links, "links", root, factory);
+    } else if (links.get(0) instanceof SyndLinkImpl) {
+      ArrayNode linksArray = factory.arrayNode();
+      SyndLinkImpl syndLink;
+      ObjectNode linkNode;
+      for (Object obj : links) {
+        linkNode = factory.objectNode();
+        syndLink = (SyndLinkImpl) obj;
+        linkNode.put("rel", syndLink.getRel());
+        linkNode.put("href", syndLink.getHref());
+        linkNode.put("type", syndLink.getType());
+        linkNode.put("length", syndLink.getLength());
+        linkNode.put("hrefLang", syndLink.getHreflang());
+        linkNode.put("title", syndLink.getTitle());
+        linksArray.add(linkNode);
+      }
+      root.put("links", linksArray);
+    } else {
+      LOGGER.error("No implementation for handling links of class : {}", links.get(0).getClass().toString());
     }
+  }
 
-    private void serializeModules(ObjectNode root, JsonNodeFactory factory, List modules) {
-        if(modules == null || modules.size() == 0)
-            return;
-        ArrayNode modulesArray = factory.arrayNode();
-        for(Object obj : modules) {
-            if(obj instanceof Module) {
-                Module mod = (Module) obj;
-                if(mod.getUri() != null)
-                    modulesArray.add(mod.getUri());
-            } else {
-                LOGGER.debug("SyndEntry.getModules() items are not of type Module. Need to handle the case of class : {}", obj.getClass().toString());
-            }
+  private void serializeModules(ObjectNode root, JsonNodeFactory factory, List modules) {
+    if (modules == null || modules.size() == 0) {
+      return;
+    }
+    ArrayNode modulesArray = factory.arrayNode();
+    for (Object obj : modules) {
+      if (obj instanceof Module) {
+        Module mod = (Module) obj;
+        if (mod.getUri() != null) {
+          modulesArray.add(mod.getUri());
         }
-        root.put("modules", modulesArray);
+      } else {
+        LOGGER.debug("SyndEntry.getModules() items are not of type Module. Need to handle the case of class : {}",
+            obj.getClass().toString());
+      }
     }
+    root.put("modules", modulesArray);
+  }
 
-    private void serializeSource(ObjectNode root, JsonNodeFactory factory, SyndFeed source) {
-        if(source == null)
-            return;
-        ObjectNode sourceNode = factory.objectNode();
-        serializeString(source.getAuthor(), "author", sourceNode);
-        serializeListOfStrings(source.getAuthors(), "authors", sourceNode, factory);
-        serializeCategories(sourceNode, factory, source.getCategories());
-        serializeString(source.getCopyright(), "copyright", sourceNode);
-        serializeListOfStrings(source.getContributors(), "contributors", sourceNode, factory);
-        serializeString(source.getDescription(), "description", sourceNode);
-        serializeDescription(sourceNode, factory, source.getDescriptionEx());
-        // source.getEntries(); wtf?
-        serializeString(source.getFeedType(), "feedType", sourceNode);
-        serializeImage(sourceNode, factory, source.getImage());
-        serializeForeignMarkUp(sourceNode, factory, source.getForeignMarkup());
-        serializeString(source.getLanguage(), "language", sourceNode);
-        serializeString(source.getLink(), "link", sourceNode);
-        serializeListOfStrings(source.getLinks(), "links", sourceNode, factory);
-        serializeModules(sourceNode, factory, source.getModules());
-        serializeDate(sourceNode, source.getPublishedDate(), "publishedDate");
-        serializeString(source.getTitle(), "title", sourceNode);
-        serializeString(source.getUri(), "uri", sourceNode);
-
-        root.put("source", sourceNode);
+  private void serializeSource(ObjectNode root, JsonNodeFactory factory, SyndFeed source) {
+    if (source == null) {
+      return;
     }
+    ObjectNode sourceNode = factory.objectNode();
+    serializeString(source.getAuthor(), "author", sourceNode);
+    serializeListOfStrings(source.getAuthors(), "authors", sourceNode, factory);
+    serializeCategories(sourceNode, factory, source.getCategories());
+    serializeString(source.getCopyright(), "copyright", sourceNode);
+    serializeListOfStrings(source.getContributors(), "contributors", sourceNode, factory);
+    serializeString(source.getDescription(), "description", sourceNode);
+    serializeDescription(sourceNode, factory, source.getDescriptionEx());
+    // source.getEntries(); wtf?
+    serializeString(source.getFeedType(), "feedType", sourceNode);
+    serializeImage(sourceNode, factory, source.getImage());
+    serializeForeignMarkUp(sourceNode, factory, source.getForeignMarkup());
+    serializeString(source.getLanguage(), "language", sourceNode);
+    serializeString(source.getLink(), "link", sourceNode);
+    serializeListOfStrings(source.getLinks(), "links", sourceNode, factory);
+    serializeModules(sourceNode, factory, source.getModules());
+    serializeDate(sourceNode, source.getPublishedDate(), "publishedDate");
+    serializeString(source.getTitle(), "title", sourceNode);
+    serializeString(source.getUri(), "uri", sourceNode);
+
+    root.put("source", sourceNode);
+  }
 
-    private void serializeString(String string, String key, ObjectNode node) {
-        if(string != null && !string.equals(""))
-            node.put(key, string);
+  private void serializeString(String string, String key, ObjectNode node) {
+    if (string != null && !string.equals("")) {
+      node.put(key, string);
     }
+  }
 
 }


[07/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/queues/ThroughputQueue.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/queues/ThroughputQueue.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/queues/ThroughputQueue.java
index deb657a..aae12a0 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/queues/ThroughputQueue.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/queues/ThroughputQueue.java
@@ -18,11 +18,11 @@
 package org.apache.streams.local.queues;
 
 import org.apache.streams.local.builders.LocalStreamBuilder;
+
+import org.apache.commons.lang.NotImplementedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.NotImplementedException;
 
-import javax.management.*;
 import java.lang.management.ManagementFactory;
 import java.util.Collection;
 import java.util.Iterator;
@@ -32,6 +32,12 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
+import javax.management.InstanceAlreadyExistsException;
+import javax.management.MBeanRegistrationException;
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.NotCompliantMBeanException;
+import javax.management.ObjectName;
 
 /**
  * A {@link java.util.concurrent.BlockingQueue} implementation that allows the measure measurement of how
@@ -44,437 +50,437 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
  */
 public class ThroughputQueue<E> implements BlockingQueue<E>, ThroughputQueueMXBean {
 
-    public static final String NAME_TEMPLATE = "org.apache.streams.local:type=ThroughputQueue,name=%s,identifier=%s,startedAt=%s";
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(ThroughputQueue.class);
-
-    private BlockingQueue<ThroughputElement<E>> underlyingQueue;
-    private AtomicLong elementsAdded;
-    private AtomicLong elementsRemoved;
-    private AtomicLong startTime;
-    private AtomicLong totalQueueTime;
-    private long maxQueuedTime;
-    private volatile boolean active;
-    private ReadWriteLock maxQueueTimeLock;
-
-    /**
-     * Creates an unbounded, unregistered {@code ThroughputQueue}
-     */
-    public ThroughputQueue() {
-        this(-1, null, LocalStreamBuilder.DEFAULT_STREAM_IDENTIFIER, -1);
+  public static final String NAME_TEMPLATE = "org.apache.streams.local:type=ThroughputQueue,name=%s,identifier=%s,startedAt=%s";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(ThroughputQueue.class);
+
+  private BlockingQueue<ThroughputElement<E>> underlyingQueue;
+  private AtomicLong elementsAdded;
+  private AtomicLong elementsRemoved;
+  private AtomicLong startTime;
+  private AtomicLong totalQueueTime;
+  private long maxQueuedTime;
+  private volatile boolean active;
+  private ReadWriteLock maxQueueTimeLock;
+
+  /**
+   * Creates an unbounded, unregistered {@code ThroughputQueue}
+   */
+  public ThroughputQueue() {
+    this(-1, null, LocalStreamBuilder.DEFAULT_STREAM_IDENTIFIER, -1);
+  }
+
+  /**
+   *
+   * @param streamIdentifier
+   * @param startedAt
+   */
+  public ThroughputQueue(String streamIdentifier, long startedAt) {
+    this(-1, null, streamIdentifier, startedAt);
+  }
+
+  /**
+   * Creates a bounded, unregistered {@code ThroughputQueue}
+   *
+   * @param maxSize maximum capacity of queue, if maxSize < 1 then unbounded
+   */
+  public ThroughputQueue(int maxSize) {
+    this(maxSize, null, LocalStreamBuilder.DEFAULT_STREAM_IDENTIFIER, -1);
+  }
+
+  /**
+   *
+   * @param maxSize
+   * @param streamIdentifier
+   * @param startedAt
+   */
+  public ThroughputQueue(int maxSize, String streamIdentifier, long startedAt) {
+    this(maxSize, null, streamIdentifier, startedAt);
+  }
+
+  /**
+   * Creates an unbounded, registered {@code ThroughputQueue}
+   *
+   * @param id unique id for this queue to be registered with. if id == NULL then not registered
+   */
+  public ThroughputQueue(String id) {
+    this(-1, id, LocalStreamBuilder.DEFAULT_STREAM_IDENTIFIER, -1);
+  }
+
+  /**
+   *
+   * @param id
+   * @param streamIdentifier
+   * @param startedAt
+   */
+  public ThroughputQueue(String id, String streamIdentifier, long startedAt) {
+    this(-1, id, streamIdentifier, startedAt);
+  }
+
+  /**
+   *
+   * @param maxSize
+   * @param id
+   */
+  public ThroughputQueue(int maxSize, String id) {
+    this(maxSize, id, LocalStreamBuilder.DEFAULT_STREAM_IDENTIFIER, -1);
+
+  }
+
+  /**
+   * Creates a bounded, registered {@code ThroughputQueue}
+   *
+   * @param maxSize maximum capacity of queue, if maxSize < 1 then unbounded
+   * @param id      unique id for this queue to be registered with. if id == NULL then not registered
+   */
+  public ThroughputQueue(int maxSize, String id, String streamIdentifier, long startedAt) {
+    if (maxSize < 1) {
+      this.underlyingQueue = new LinkedBlockingQueue<>();
+    } else {
+      this.underlyingQueue = new LinkedBlockingQueue<>(maxSize);
+    }
+    this.elementsAdded = new AtomicLong(0);
+    this.elementsRemoved = new AtomicLong(0);
+    this.startTime = new AtomicLong(-1);
+    this.active = false;
+    this.maxQueuedTime = 0;
+    this.maxQueueTimeLock = new ReentrantReadWriteLock();
+    this.totalQueueTime = new AtomicLong(0);
+    if (id != null) {
+      try {
+        ObjectName name = new ObjectName(String.format(NAME_TEMPLATE, id, streamIdentifier, startedAt));
+        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+        mbs.registerMBean(this, name);
+      } catch (MalformedObjectNameException | InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException e) {
+        LOGGER.error("Failed to register MXBean : {}", e);
+        throw new RuntimeException(e);
+      }
+    }
+  }
+
+  @Override
+  public boolean add(E e) {
+    if (this.underlyingQueue.add(new ThroughputElement<E>(e))) {
+      internalAddElement();
+      return true;
+    }
+    return false;
+  }
+
+  @Override
+  public boolean offer(E e) {
+    if (this.underlyingQueue.offer(new ThroughputElement<E>(e))) {
+      internalAddElement();
+      return true;
+    }
+    return false;
+  }
+
+  @Override
+  public void put(E e) throws InterruptedException {
+    this.underlyingQueue.put(new ThroughputElement<E>(e));
+    internalAddElement();
+  }
+
+  @Override
+  public boolean offer(E e, long timeout, TimeUnit unit) throws InterruptedException {
+    if (this.underlyingQueue.offer(new ThroughputElement<E>(e), timeout, unit)) {
+      internalAddElement();
+      return true;
+    }
+    return false;
+  }
+
+  @Override
+  public E take() throws InterruptedException {
+    ThroughputElement<E> e = this.underlyingQueue.take();
+    internalRemoveElement(e);
+    return e.getElement();
+  }
+
+  @Override
+  public E poll(long timeout, TimeUnit unit) throws InterruptedException {
+    ThroughputElement<E> e = this.underlyingQueue.poll(timeout, unit);
+    if(e != null) {
+      internalRemoveElement(e);
+      return e.getElement();
+    }
+    return null;
+  }
+
+  @Override
+  public int remainingCapacity() {
+    return this.underlyingQueue.remainingCapacity();
+  }
+
+  @Override
+  public boolean remove(Object o) {
+    try {
+      return this.underlyingQueue.remove(new ThroughputElement<E>((E) o));
+    } catch (ClassCastException cce) {
+      return false;
+    }
+  }
+
+  @Override
+  public boolean contains(Object o) {
+    try {
+      return this.underlyingQueue.contains(new ThroughputElement<E>((E) o));
+    } catch (ClassCastException cce) {
+      return false;
+    }
+  }
+
+  @Override
+  public int drainTo(Collection<? super E> c) {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public int drainTo(Collection<? super E> c, int maxElements) {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public E remove() {
+    ThroughputElement<E> e = this.underlyingQueue.remove();
+    if(e != null) {
+      internalRemoveElement(e);
+      return e.getElement();
+    }
+    return null;
+  }
+
+  @Override
+  public E poll() {
+    ThroughputElement<E> e = this.underlyingQueue.poll();
+    if(e != null) {
+      internalRemoveElement(e);
+      return e.getElement();
+    }
+    return null;
+  }
+
+  @Override
+  public E element() {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public E peek() {
+    ThroughputElement<E> e = this.underlyingQueue.peek();
+    if( e != null) {
+      return e.getElement();
+    }
+    return null;
+  }
+
+  @Override
+  public int size() {
+    return this.underlyingQueue.size();
+  }
+
+  @Override
+  public boolean isEmpty() {
+    return this.underlyingQueue.isEmpty();
+  }
+
+  @Override
+  public Iterator<E> iterator() {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public Object[] toArray() {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public <T> T[] toArray(T[] a) {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public boolean containsAll(Collection<?> c) {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public boolean addAll(Collection<? extends E> c) {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public boolean removeAll(Collection<?> c) {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public boolean retainAll(Collection<?> c) {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public void clear() {
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public long getCurrentSize() {
+    return this.elementsAdded.get() - this.elementsRemoved.get();
+  }
+
+  /**
+   * If elements have been removed from the queue or no elements have been added, it returns the average wait time
+   * in milliseconds. If elements have been added, but none have been removed, it returns the time waited by the first
+   * element in the queue.
+   *
+   * @return the average wait time in milliseconds
+   */
+  @Override
+  public double getAvgWait() {
+    if (this.elementsRemoved.get() == 0) {
+      if (this.getCurrentSize() > 0) {
+        return this.underlyingQueue.peek().getWaited();
+      } else {
+        return 0.0;
+      }
+    } else {
+      return (double) this.totalQueueTime.get() / (double) this.elementsRemoved.get();
+    }
+  }
+
+  @Override
+  public long getMaxWait() {
+    ThroughputElement<E> e = this.underlyingQueue.peek();
+    long max = -1;
+    try {
+      this.maxQueueTimeLock.readLock().lock();
+      if (e != null && e.getWaited() > this.maxQueuedTime) {
+        max = e.getWaited();
+      } else {
+        max = this.maxQueuedTime;
+      }
+    } finally {
+      this.maxQueueTimeLock.readLock().unlock();
+    }
+    return max;
+  }
+
+  @Override
+  public long getRemoved() {
+    return this.elementsRemoved.get();
+  }
+
+  @Override
+  public long getAdded() {
+    return this.elementsAdded.get();
+  }
+
+  @Override
+  public double getThroughput() {
+    if (active) {
+      return this.elementsRemoved.get() / ((System.currentTimeMillis() - this.startTime.get()) / 1000.0);
+    }
+    return 0.0;
+  }
+
+  /**
+   * Handles updating the stats whenever elements are added to the queue
+   */
+  private void internalAddElement() {
+    this.elementsAdded.incrementAndGet();
+    synchronized (this) {
+      if (!this.active) {
+        this.startTime.set(System.currentTimeMillis());
+        this.active = true;
+      }
+    }
+  }
+
+  /**
+   * Handle updating the stats whenever elements are removed from the queue
+   * @param e Element removed
+   */
+  private void internalRemoveElement(ThroughputElement<E> e) {
+    if(e != null) {
+      this.elementsRemoved.incrementAndGet();
+      Long queueTime = e.getWaited();
+      this.totalQueueTime.addAndGet(queueTime);
+      boolean unlocked = false;
+      try {
+        this.maxQueueTimeLock.readLock().lock();
+        if (this.maxQueuedTime < queueTime) {
+          this.maxQueueTimeLock.readLock().unlock();
+          unlocked = true;
+          try {
+            this.maxQueueTimeLock.writeLock().lock();
+            this.maxQueuedTime = queueTime;
+          } finally {
+            this.maxQueueTimeLock.writeLock().unlock();
+          }
+        }
+      } finally {
+        if (!unlocked)
+          this.maxQueueTimeLock.readLock().unlock();
+      }
     }
+  }
 
-    /**
-     *
-     * @param streamIdentifier
-     * @param startedAt
-     */
-    public ThroughputQueue(String streamIdentifier, long startedAt) {
-        this(-1, null, streamIdentifier, startedAt);
-    }
 
-    /**
-     * Creates a bounded, unregistered {@code ThroughputQueue}
-     *
-     * @param maxSize maximum capacity of queue, if maxSize < 1 then unbounded
-     */
-    public ThroughputQueue(int maxSize) {
-        this(maxSize, null, LocalStreamBuilder.DEFAULT_STREAM_IDENTIFIER, -1);
-    }
+  /**
+   * Element wrapper to measure time waiting on the queue
+   *
+   * @param <E>
+   */
+  private class ThroughputElement<E> {
 
-    /**
-     *
-     * @param maxSize
-     * @param streamIdentifier
-     * @param startedAt
-     */
-    public ThroughputQueue(int maxSize, String streamIdentifier, long startedAt) {
-        this(maxSize, null, streamIdentifier, startedAt);
-    }
-
-    /**
-     * Creates an unbounded, registered {@code ThroughputQueue}
-     *
-     * @param id unique id for this queue to be registered with. if id == NULL then not registered
-     */
-    public ThroughputQueue(String id) {
-        this(-1, id, LocalStreamBuilder.DEFAULT_STREAM_IDENTIFIER, -1);
-    }
+    private long queuedTime;
+    private E element;
 
-    /**
-     *
-     * @param id
-     * @param streamIdentifier
-     * @param startedAt
-     */
-    public ThroughputQueue(String id, String streamIdentifier, long startedAt) {
-        this(-1, id, streamIdentifier, startedAt);
+    protected ThroughputElement(E element) {
+      this.element = element;
+      this.queuedTime = System.currentTimeMillis();
     }
 
     /**
+     * Get the time this element has been waiting on the queue.
+     * current time - time element was queued
      *
-     * @param maxSize
-     * @param id
+     * @return time this element has been waiting on the queue in milliseconds
      */
-    public ThroughputQueue(int maxSize, String id) {
-        this(maxSize, id, LocalStreamBuilder.DEFAULT_STREAM_IDENTIFIER, -1);
-
+    public long getWaited() {
+      return System.currentTimeMillis() - this.queuedTime;
     }
 
     /**
-     * Creates a bounded, registered {@code ThroughputQueue}
+     * Get the queued element
      *
-     * @param maxSize maximum capacity of queue, if maxSize < 1 then unbounded
-     * @param id      unique id for this queue to be registered with. if id == NULL then not registered
+     * @return the element
      */
-    public ThroughputQueue(int maxSize, String id, String streamIdentifier, long startedAt) {
-        if (maxSize < 1) {
-            this.underlyingQueue = new LinkedBlockingQueue<>();
-        } else {
-            this.underlyingQueue = new LinkedBlockingQueue<>(maxSize);
-        }
-        this.elementsAdded = new AtomicLong(0);
-        this.elementsRemoved = new AtomicLong(0);
-        this.startTime = new AtomicLong(-1);
-        this.active = false;
-        this.maxQueuedTime = 0;
-        this.maxQueueTimeLock = new ReentrantReadWriteLock();
-        this.totalQueueTime = new AtomicLong(0);
-        if (id != null) {
-            try {
-                ObjectName name = new ObjectName(String.format(NAME_TEMPLATE, id, streamIdentifier, startedAt));
-                MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
-                mbs.registerMBean(this, name);
-            } catch (MalformedObjectNameException | InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException e) {
-                LOGGER.error("Failed to register MXBean : {}", e);
-                throw new RuntimeException(e);
-            }
-        }
-    }
-
-    @Override
-    public boolean add(E e) {
-        if (this.underlyingQueue.add(new ThroughputElement<E>(e))) {
-            internalAddElement();
-            return true;
-        }
-        return false;
-    }
-
-    @Override
-    public boolean offer(E e) {
-        if (this.underlyingQueue.offer(new ThroughputElement<E>(e))) {
-            internalAddElement();
-            return true;
-        }
-        return false;
-    }
-
-    @Override
-    public void put(E e) throws InterruptedException {
-        this.underlyingQueue.put(new ThroughputElement<E>(e));
-        internalAddElement();
-    }
-
-    @Override
-    public boolean offer(E e, long timeout, TimeUnit unit) throws InterruptedException {
-        if (this.underlyingQueue.offer(new ThroughputElement<E>(e), timeout, unit)) {
-            internalAddElement();
-            return true;
-        }
-        return false;
-    }
-
-    @Override
-    public E take() throws InterruptedException {
-        ThroughputElement<E> e = this.underlyingQueue.take();
-        internalRemoveElement(e);
-        return e.getElement();
-    }
-
-    @Override
-    public E poll(long timeout, TimeUnit unit) throws InterruptedException {
-        ThroughputElement<E> e = this.underlyingQueue.poll(timeout, unit);
-        if(e != null) {
-            internalRemoveElement(e);
-            return e.getElement();
-        }
-        return null;
-    }
-
-    @Override
-    public int remainingCapacity() {
-        return this.underlyingQueue.remainingCapacity();
-    }
-
-    @Override
-    public boolean remove(Object o) {
-        try {
-            return this.underlyingQueue.remove(new ThroughputElement<E>((E) o));
-        } catch (ClassCastException cce) {
-            return false;
-        }
-    }
-
-    @Override
-    public boolean contains(Object o) {
-        try {
-            return this.underlyingQueue.contains(new ThroughputElement<E>((E) o));
-        } catch (ClassCastException cce) {
-            return false;
-        }
-    }
-
-    @Override
-    public int drainTo(Collection<? super E> c) {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public int drainTo(Collection<? super E> c, int maxElements) {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public E remove() {
-        ThroughputElement<E> e = this.underlyingQueue.remove();
-        if(e != null) {
-            internalRemoveElement(e);
-            return e.getElement();
-        }
-        return null;
-    }
-
-    @Override
-    public E poll() {
-        ThroughputElement<E> e = this.underlyingQueue.poll();
-        if(e != null) {
-            internalRemoveElement(e);
-            return e.getElement();
-        }
-        return null;
+    public E getElement() {
+      return this.element;
     }
 
-    @Override
-    public E element() {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public E peek() {
-        ThroughputElement<E> e = this.underlyingQueue.peek();
-        if( e != null) {
-            return e.getElement();
-        }
-        return null;
-    }
-
-    @Override
-    public int size() {
-        return this.underlyingQueue.size();
-    }
-
-    @Override
-    public boolean isEmpty() {
-        return this.underlyingQueue.isEmpty();
-    }
-
-    @Override
-    public Iterator<E> iterator() {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public Object[] toArray() {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public <T> T[] toArray(T[] a) {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public boolean containsAll(Collection<?> c) {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public boolean addAll(Collection<? extends E> c) {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public boolean removeAll(Collection<?> c) {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public boolean retainAll(Collection<?> c) {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public void clear() {
-        throw new NotImplementedException();
-    }
-
-    @Override
-    public long getCurrentSize() {
-        return this.elementsAdded.get() - this.elementsRemoved.get();
-    }
 
     /**
-     * If elements have been removed from the queue or no elements have been added, it returns the average wait time
-     * in milliseconds. If elements have been added, but none have been removed, it returns the time waited by the first
-     * element in the queue.
-     *
-     * @return the average wait time in milliseconds
+     * Measures equality by the element and ignores the queued time
+     * @param obj
+     * @return
      */
     @Override
-    public double getAvgWait() {
-        if (this.elementsRemoved.get() == 0) {
-            if (this.getCurrentSize() > 0) {
-                return this.underlyingQueue.peek().getWaited();
-            } else {
-                return 0.0;
-            }
+    public boolean equals(Object obj) {
+      if(obj instanceof ThroughputElement && obj != null) {
+        ThroughputElement that = (ThroughputElement) obj;
+        if(that.getElement() == null && this.getElement() == null) {
+          return true;
+        } else if(that.getElement() != null) {
+          return that.getElement().equals(this.getElement());
         } else {
-            return (double) this.totalQueueTime.get() / (double) this.elementsRemoved.get();
-        }
-    }
-
-    @Override
-    public long getMaxWait() {
-        ThroughputElement<E> e = this.underlyingQueue.peek();
-        long max = -1;
-        try {
-            this.maxQueueTimeLock.readLock().lock();
-            if (e != null && e.getWaited() > this.maxQueuedTime) {
-                max = e.getWaited();
-            } else {
-                max = this.maxQueuedTime;
-            }
-        } finally {
-            this.maxQueueTimeLock.readLock().unlock();
-        }
-        return max;
-    }
-
-    @Override
-    public long getRemoved() {
-        return this.elementsRemoved.get();
-    }
-
-    @Override
-    public long getAdded() {
-        return this.elementsAdded.get();
-    }
-
-    @Override
-    public double getThroughput() {
-        if (active) {
-            return this.elementsRemoved.get() / ((System.currentTimeMillis() - this.startTime.get()) / 1000.0);
-        }
-        return 0.0;
-    }
-
-    /**
-     * Handles updating the stats whenever elements are added to the queue
-     */
-    private void internalAddElement() {
-        this.elementsAdded.incrementAndGet();
-        synchronized (this) {
-            if (!this.active) {
-                this.startTime.set(System.currentTimeMillis());
-                this.active = true;
-            }
-        }
-    }
-
-    /**
-     * Handle updating the stats whenever elements are removed from the queue
-     * @param e Element removed
-     */
-    private void internalRemoveElement(ThroughputElement<E> e) {
-        if(e != null) {
-            this.elementsRemoved.incrementAndGet();
-            Long queueTime = e.getWaited();
-            this.totalQueueTime.addAndGet(queueTime);
-            boolean unlocked = false;
-            try {
-                this.maxQueueTimeLock.readLock().lock();
-                if (this.maxQueuedTime < queueTime) {
-                    this.maxQueueTimeLock.readLock().unlock();
-                    unlocked = true;
-                    try {
-                        this.maxQueueTimeLock.writeLock().lock();
-                        this.maxQueuedTime = queueTime;
-                    } finally {
-                        this.maxQueueTimeLock.writeLock().unlock();
-                    }
-                }
-            } finally {
-                if (!unlocked)
-                    this.maxQueueTimeLock.readLock().unlock();
-            }
-        }
-    }
-
-
-    /**
-     * Element wrapper to measure time waiting on the queue
-     *
-     * @param <E>
-     */
-    private class ThroughputElement<E> {
-
-        private long queuedTime;
-        private E element;
-
-        protected ThroughputElement(E element) {
-            this.element = element;
-            this.queuedTime = System.currentTimeMillis();
-        }
-
-        /**
-         * Get the time this element has been waiting on the queue.
-         * current time - time element was queued
-         *
-         * @return time this element has been waiting on the queue in milliseconds
-         */
-        public long getWaited() {
-            return System.currentTimeMillis() - this.queuedTime;
-        }
-
-        /**
-         * Get the queued element
-         *
-         * @return the element
-         */
-        public E getElement() {
-            return this.element;
-        }
-
-
-        /**
-         * Measures equality by the element and ignores the queued time
-         * @param obj
-         * @return
-         */
-        @Override
-        public boolean equals(Object obj) {
-            if(obj instanceof ThroughputElement && obj != null) {
-                ThroughputElement that = (ThroughputElement) obj;
-                if(that.getElement() == null && this.getElement() == null) {
-                    return true;
-                } else if(that.getElement() != null) {
-                    return that.getElement().equals(this.getElement());
-                } else {
-                    return false;
-                }
-            }
-            return false;
+          return false;
         }
+      }
+      return false;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/queues/ThroughputQueueMXBean.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/queues/ThroughputQueueMXBean.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/queues/ThroughputQueueMXBean.java
index 571a035..9cc4593 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/queues/ThroughputQueueMXBean.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/queues/ThroughputQueueMXBean.java
@@ -17,50 +17,48 @@
  */
 package org.apache.streams.local.queues;
 
-import javax.management.MXBean;
-
 /**
  * MXBean capable queue that monitors the throughput of the queue
  */
 public interface ThroughputQueueMXBean {
 
-    /**
-     * Returns the number of items on the queue.
-     * @return number of items on queue
-     */ 
-    public long getCurrentSize();
+  /**
+   * Returns the number of items on the queue.
+   * @return number of items on queue
+   */
+  public long getCurrentSize();
 
-    /**
-     * Get the average time an item spends in queue in milliseconds
-     * @return average time an item spends in queue in milliseconds
-     */
-    public double getAvgWait();
+  /**
+   * Get the average time an item spends in queue in milliseconds
+   * @return average time an item spends in queue in milliseconds
+   */
+  public double getAvgWait();
 
-    /**
-     * Get the maximum time an item has spent on the queue before being removed from the queue.
-     * @return the maximum time an item has spent on the queue
-     */
-    public long getMaxWait();
+  /**
+   * Get the maximum time an item has spent on the queue before being removed from the queue.
+   * @return the maximum time an item has spent on the queue
+   */
+  public long getMaxWait();
 
-    /**
-     * Get the number of items that have been removed from this queue
-     * @return number of items that have been removed from the queue
-     */
-    public long getRemoved();
+  /**
+   * Get the number of items that have been removed from this queue
+   * @return number of items that have been removed from the queue
+   */
+  public long getRemoved();
 
-    /**
-     * Get the number of items that have been added to the queue
-     * @return number of items that have been added to the queue
-     */
-    public long getAdded();
+  /**
+   * Get the number of items that have been added to the queue
+   * @return number of items that have been added to the queue
+   */
+  public long getAdded();
 
-    /**
-     * Get the the throughput of the queue measured by the number of items removed from the queue
-     * dived by the time the queue has been active.
-     * Active time starts once the first item has been placed on the queue
-     * @return throughput of queue. items/sec, items removed / time active
-     */
-    public double getThroughput();
+  /**
+   * Get the the throughput of the queue measured by the number of items removed from the queue
+   * dived by the time the queue has been active.
+   * Active time starts once the first item has been placed on the queue
+   * @return throughput of queue. items/sec, items removed / time active
+   */
+  public double getThroughput();
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/BaseStreamsTask.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/BaseStreamsTask.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/BaseStreamsTask.java
index 8bc7769..5f4bfdb 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/BaseStreamsTask.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/BaseStreamsTask.java
@@ -18,200 +18,202 @@
 
 package org.apache.streams.local.tasks;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Lists;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.local.builders.LocalStreamBuilder;
 import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.util.ComponentUtils;
 import org.apache.streams.util.SerializationUtil;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.collect.Lists;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.Serializable;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.TimeUnit;
 
 /**
- *
+ * BaseStreamsTask is the primary abstract StreamsTask.
  */
 public abstract class BaseStreamsTask implements StreamsTask {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(BaseStreamsTask.class);
-
-    private List<BlockingQueue<StreamsDatum>> inQueues = new ArrayList<BlockingQueue<StreamsDatum>>();
-    private List<BlockingQueue<StreamsDatum>> outQueues = new LinkedList<BlockingQueue<StreamsDatum>>();
-    private int inIndex = 0;
-    private ObjectMapper mapper;
-    protected StreamsConfiguration streamConfig;
-
-    public BaseStreamsTask(StreamsConfiguration config) {
-        this.mapper = StreamsJacksonMapper.getInstance();
-        this.mapper.registerSubtypes(Activity.class);
-        if( config != null )
-            this.streamConfig = config;
-        else
-            this.streamConfig = StreamsConfigurator.detectConfiguration();
-
-        setStartedAt();
-    }
-
-
-    @Override
-    public void addInputQueue(BlockingQueue<StreamsDatum> inputQueue) {
-        this.inQueues.add(inputQueue);
-    }
-
-    @Override
-    public void addOutputQueue(BlockingQueue<StreamsDatum> outputQueue) {
-        this.outQueues.add(outputQueue);
+  private static final Logger LOGGER = LoggerFactory.getLogger(BaseStreamsTask.class);
+
+  private List<BlockingQueue<StreamsDatum>> inQueues = new ArrayList<BlockingQueue<StreamsDatum>>();
+  private List<BlockingQueue<StreamsDatum>> outQueues = new LinkedList<BlockingQueue<StreamsDatum>>();
+  private int inIndex = 0;
+  private ObjectMapper mapper;
+  protected StreamsConfiguration streamConfig;
+
+  public BaseStreamsTask(StreamsConfiguration config) {
+    this.mapper = StreamsJacksonMapper.getInstance();
+    this.mapper.registerSubtypes(Activity.class);
+    if( config != null ) {
+      this.streamConfig = config;
+    } else {
+      this.streamConfig = StreamsConfigurator.detectConfiguration();
     }
-
-    @Override
-    public List<BlockingQueue<StreamsDatum>> getInputQueues() {
-        return this.inQueues;
+    setStartedAt();
+  }
+
+  @Override
+  public void addInputQueue(BlockingQueue<StreamsDatum> inputQueue) {
+    this.inQueues.add(inputQueue);
+  }
+
+  @Override
+  public void addOutputQueue(BlockingQueue<StreamsDatum> outputQueue) {
+    this.outQueues.add(outputQueue);
+  }
+
+  @Override
+  public List<BlockingQueue<StreamsDatum>> getInputQueues() {
+    return this.inQueues;
+  }
+
+  @Override
+  public List<BlockingQueue<StreamsDatum>> getOutputQueues() {
+    return this.outQueues;
+  }
+
+  /**
+   * SHOULD NOT BE NECESSARY, WILL REMOVE.
+   * Round Robins through input queues to get the next StreamsDatum. If all input queues are empty, it will return null.
+   * @return the next StreamsDatum or null if all input queues are empty.
+   */
+  @Deprecated
+  protected StreamsDatum getNextDatum() {
+    int startIndex = this.inIndex;
+    int index = startIndex;
+    StreamsDatum datum = null;
+    do {
+      datum = this.inQueues.get(index).poll();
+      index = getNextInputQueueIndex();
+    } while( datum == null && startIndex != index);
+    return datum;
+  }
+
+  /**
+   * Adds a StreamDatum to the outgoing queues.  If there are multiple queues, it uses serialization to create
+   * clones of the datum and adds a new clone to each queue.
+   * @param datum
+   */
+  protected void addToOutgoingQueue(StreamsDatum datum) throws InterruptedException{
+    if(this.outQueues.size() == 1) {
+      outQueues.get(0).put(datum);
     }
-
-    @Override
-    public List<BlockingQueue<StreamsDatum>> getOutputQueues() {
-        return this.outQueues;
-    }
-
-    /**
-     * SHOULD NOT BE NECCESARY, WILL REMOVE.
-     * Round Robins through input queues to get the next StreamsDatum. If all input queues are empty, it will return null.
-     * @return the next StreamsDatum or null if all input queues are empty.
-     */
-    @Deprecated
-    protected StreamsDatum getNextDatum() {
-        int startIndex = this.inIndex;
-        int index = startIndex;
-        StreamsDatum datum = null;
-        do {
-            datum = this.inQueues.get(index).poll();
-            index = getNextInputQueueIndex();
-        } while( datum == null && startIndex != index);
-        return datum;
-    }
-
-    /**
-     * Adds a StreamDatum to the outgoing queues.  If there are multiple queues, it uses serialization to create
-     * clones of the datum and adds a new clone to each queue.
-     * @param datum
-     */
-    protected void addToOutgoingQueue(StreamsDatum datum) throws InterruptedException{
-        if(this.outQueues.size() == 1) {
-            outQueues.get(0).put(datum);
-        }
-        else {
-            List<BlockingQueue<StreamsDatum>> toOutput = Lists.newLinkedList(this.outQueues);
-            while(!toOutput.isEmpty()) {
-                for (BlockingQueue<StreamsDatum> queue : toOutput) {
-                    StreamsDatum newDatum = cloneStreamsDatum(datum);
-                    if (newDatum != null) {
-                        if (queue.offer(newDatum, 500, TimeUnit.MILLISECONDS)) {
-                            toOutput.remove(queue);
-                        }
-                    }
-                }
+    else {
+      List<BlockingQueue<StreamsDatum>> toOutput = Lists.newLinkedList(this.outQueues);
+      while(!toOutput.isEmpty()) {
+        for (BlockingQueue<StreamsDatum> queue : toOutput) {
+          StreamsDatum newDatum = cloneStreamsDatum(datum);
+          if (newDatum != null) {
+            if (queue.offer(newDatum, 500, TimeUnit.MILLISECONDS)) {
+              toOutput.remove(queue);
             }
+          }
         }
+      }
     }
+  }
 
-    @Override
-    public boolean isWaiting() {
-        if(this.inQueues == null || this.inQueues.size() == 0) {
-            return true;
-        }
-        boolean empty = true;
-        for(Queue queue : this.inQueues) {
-            empty = empty && queue.isEmpty();
-        }
-        return empty;
+  @Override
+  public boolean isWaiting() {
+    if(this.inQueues == null || this.inQueues.size() == 0) {
+      return true;
     }
-
-    /**
-     * //TODO LOCAL MODE HACK. Need to fix
-     * In order for our data streams to ported to other data flow frame works(Storm, Hadoop, Spark, etc) we need to be able to
-     * enforce the serialization required by each framework.  This needs some thought and design before a final solution is
-     * made.
-     *
-     * In order to be able to copy/clone StreamDatums the orginal idea was to force all StreamsDatums to be java serializable.
-     * This was seen as unacceptable for local mode.  So until we come up with a solution to enforce serialization and be
-     * compatiable across multiple frame works, this hack is in place.
-     *
-     * If datum.document is Serializable, we use serialization to clone a new copy.  If it is not Serializable we attempt
-     * different methods using an com.fasterxml.jackson.databind.ObjectMapper to copy/clone the StreamsDatum. If the object
-     * is not clonable by these methods, an error is reported to the logging and a NULL object is returned.
-     *
-     * @param datum
-     * @return
-     */
-    protected StreamsDatum cloneStreamsDatum(StreamsDatum datum) {
-        try {
-
-            if(datum.document instanceof ObjectNode) {
-                return copyMetaData(datum, new StreamsDatum(((ObjectNode) datum.document).deepCopy(), datum.timestamp, datum.sequenceid));
-            }
-            else if(datum.document instanceof Activity) {
-
-                return copyMetaData(datum, new StreamsDatum(this.mapper.readValue(this.mapper.writeValueAsString(datum.document), Activity.class),
-                                        datum.timestamp,
-                                        datum.sequenceid));
-            }
+    boolean empty = true;
+    for(Queue queue : this.inQueues) {
+      empty = empty && queue.isEmpty();
+    }
+    return empty;
+  }
+
+  /**
+   * //TODO LOCAL MODE HACK. Need to fix
+   * In order for our data streams to be ported to other data flow frameworks (Storm, Hadoop, Spark, etc) we need to be able to
+   * enforce the serialization required by each framework.  This needs some thought and design before a final solution is
+   * made.
+   *
+   * In order to be able to copy/clone StreamDatums the original idea was to force all StreamsDatums to be java serializable.
+   * This was seen as unacceptable for local mode.  So until we come up with a solution to enforce serialization and be
+   * compatible across multiple frameworks, this hack is in place.
+   *
+   * If datum.document is Serializable, we use serialization to clone a new copy.  If it is not Serializable we attempt
+   * different methods using an com.fasterxml.jackson.databind.ObjectMapper to copy/clone the StreamsDatum. If the object
+   * is not clonable by these methods, an error is reported to the logging and a NULL object is returned.
+   *
+   * @param datum
+   * @return
+   */
+  protected StreamsDatum cloneStreamsDatum(StreamsDatum datum) {
+    try {
+
+      if(datum.document instanceof ObjectNode) {
+        return copyMetaData(datum, new StreamsDatum(((ObjectNode) datum.document).deepCopy(), datum.timestamp, datum.sequenceid));
+      }
+      else if(datum.document instanceof Activity) {
+
+        return copyMetaData(datum, new StreamsDatum(this.mapper.readValue(this.mapper.writeValueAsString(datum.document), Activity.class),
+            datum.timestamp,
+            datum.sequenceid));
+      }
 //            else if(this.mapper.canSerialize(datum.document.getClass())){
 //                return new StreamsDatum(this.mapper.readValue(this.mapper.writeValueAsString(datum.document), datum.document.getClass()),
 //                                        datum.timestamp,
 //                                        datum.sequenceid);
 //            }
 
-            else if(datum.document instanceof Serializable) {
-                return (StreamsDatum) SerializationUtil.cloneBySerialization(datum);
-            }
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to clone/copy StreamsDatum : {}", e);
-        }
-        LOGGER.error("Failed to clone/copy StreamsDatum with document of class : {}", datum.document.getClass().getName());
-        return null;
+      else if(datum.document instanceof Serializable) {
+        return (StreamsDatum) SerializationUtil.cloneBySerialization(datum);
+      }
+    } catch (Exception e) {
+      LOGGER.error("Exception while trying to clone/copy StreamsDatum : {}", e);
     }
-
-    private int getNextInputQueueIndex() {
-        ++this.inIndex;
-        if(this.inIndex >= this.inQueues.size()) {
-            this.inIndex = 0;
-        }
-        return this.inIndex;
+    LOGGER.error("Failed to clone/copy StreamsDatum with document of class : {}", datum.document.getClass().getName());
+    return null;
+  }
+
+  private int getNextInputQueueIndex() {
+    ++this.inIndex;
+    if(this.inIndex >= this.inQueues.size()) {
+      this.inIndex = 0;
     }
-
-    private StreamsDatum copyMetaData(StreamsDatum copyFrom, StreamsDatum copyTo) {
-        Map<String, Object> fromMeta = copyFrom.getMetadata();
-        Map<String, Object> toMeta = copyTo.getMetadata();
-        for(String key : fromMeta.keySet()) {
-            Object value = fromMeta.get(key);
-            if(value instanceof Serializable)
-                toMeta.put(key, SerializationUtil.cloneBySerialization(value));
-            else //hope for the best - should be serializable
-                toMeta.put(key, value);
-        }
-        return copyTo;
+    return this.inIndex;
+  }
+
+  private StreamsDatum copyMetaData(StreamsDatum copyFrom, StreamsDatum copyTo) {
+    Map<String, Object> fromMeta = copyFrom.getMetadata();
+    Map<String, Object> toMeta = copyTo.getMetadata();
+    for(String key : fromMeta.keySet()) {
+      Object value = fromMeta.get(key);
+      if(value instanceof Serializable)
+        toMeta.put(key, SerializationUtil.cloneBySerialization(value));
+      else //hope for the best - should be serializable
+        toMeta.put(key, value);
     }
+    return copyTo;
+  }
 
-    public long getStartedAt() {
-        return streamConfig.getStartedAt();
-    }
+  public long getStartedAt() {
+    return streamConfig.getStartedAt();
+  }
 
-    public void setStartedAt() {
-        streamConfig.setStartedAt(DateTime.now().getMillis());
-    }
+  public void setStartedAt() {
+    streamConfig.setStartedAt(DateTime.now().getMillis());
+  }
 
-    public String getStreamIdentifier() {
-        return streamConfig.getIdentifier();
-    }
+  public String getStreamIdentifier() {
+    return streamConfig.getIdentifier();
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/LocalStreamProcessMonitorThread.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/LocalStreamProcessMonitorThread.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/LocalStreamProcessMonitorThread.java
index e93ee1d..6df9767 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/LocalStreamProcessMonitorThread.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/LocalStreamProcessMonitorThread.java
@@ -28,67 +28,67 @@ import java.util.concurrent.Executor;
 @Deprecated
 public class LocalStreamProcessMonitorThread implements StatusCounterMonitorRunnable
 {
-    private static final Logger LOGGER = LoggerFactory.getLogger(LocalStreamProcessMonitorThread.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(LocalStreamProcessMonitorThread.class);
 
-    private Executor executor;
+  private Executor executor;
 
-    private int seconds;
+  private int seconds;
 
-    private boolean run = true;
+  private boolean run = true;
 
-    public LocalStreamProcessMonitorThread(Executor executor, int delayInSeconds) {
-        this.executor = executor;
-        this.seconds = delayInSeconds;
-    }
+  public LocalStreamProcessMonitorThread(Executor executor, int delayInSeconds) {
+    this.executor = executor;
+    this.seconds = delayInSeconds;
+  }
 
-    @Override
-    public void shutdown(){
-        this.run = false;
-    }
+  @Override
+  public void shutdown(){
+    this.run = false;
+  }
 
-    @Override
-    public boolean isRunning() {
-        return this.run;
-    }
+  @Override
+  public boolean isRunning() {
+    return this.run;
+  }
 
-    @Override
-    public void run()
-    {
-        while(run){
-
-            /**
-             *
-             * Note:
-             * Quick class and method to let us see what is going on with the JVM. We need to make sure
-             * that everything is running with as little memory as possible. If we are generating a heap
-             * overflow, this will be very apparent by the information shown here.
-             */
-
-            MemoryUsage memoryUsage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
-
-            String maxMemory = memoryUsage.getMax() == Long.MAX_VALUE ? "NO_LIMIT" :
-                    humanReadableByteCount(memoryUsage.getMax(), true);
-
-            String usedMemory = humanReadableByteCount(memoryUsage.getUsed(), true);
-
-            LOGGER.debug("[monitor] Used Memory: {}, Max: {}",
-                    usedMemory,
-                    maxMemory);
-
-            try
-            {
-                Thread.sleep(seconds*1000);
-            }
-            catch (InterruptedException e)
-            { }
-        }
-    }
+  @Override
+  public void run()
+  {
+    while(run){
+
+      /**
+       *
+       * Note:
+       * Quick class and method to let us see what is going on with the JVM. We need to make sure
+       * that everything is running with as little memory as possible. If we are generating a heap
+       * overflow, this will be very apparent by the information shown here.
+       */
+
+      MemoryUsage memoryUsage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
+
+      String maxMemory = memoryUsage.getMax() == Long.MAX_VALUE ? "NO_LIMIT" :
+          humanReadableByteCount(memoryUsage.getMax(), true);
+
+      String usedMemory = humanReadableByteCount(memoryUsage.getUsed(), true);
+
+      LOGGER.debug("[monitor] Used Memory: {}, Max: {}",
+          usedMemory,
+          maxMemory);
 
-    public String humanReadableByteCount(long bytes, boolean si) {
-        int unit = si ? 1000 : 1024;
-        if (bytes < unit) return bytes + " B";
-        int exp = (int) (Math.log(bytes) / Math.log(unit));
-        String pre = (si ? "kMGTPE" : "KMGTPE").charAt(exp-1) + (si ? "" : "i");
-        return String.format("%.1f %sB", bytes / Math.pow(unit, exp), pre);
+      try
+      {
+        Thread.sleep(seconds*1000);
+      }
+      catch (InterruptedException e)
+      { }
     }
+  }
+
+  public String humanReadableByteCount(long bytes, boolean si) {
+    int unit = si ? 1000 : 1024;
+    if (bytes < unit) return bytes + " B";
+    int exp = (int) (Math.log(bytes) / Math.log(unit));
+    String pre = (si ? "kMGTPE" : "KMGTPE").charAt(exp-1) + (si ? "" : "i");
+    return String.format("%.1f %sB", bytes / Math.pow(unit, exp), pre);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StatusCounterMonitorRunnable.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StatusCounterMonitorRunnable.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StatusCounterMonitorRunnable.java
index 5d4d8b5..9dc91d3 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StatusCounterMonitorRunnable.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StatusCounterMonitorRunnable.java
@@ -20,6 +20,6 @@ package org.apache.streams.local.tasks;
 
 @Deprecated
 public interface StatusCounterMonitorRunnable extends Runnable {
-    void shutdown();
-    boolean isRunning();
+  void shutdown();
+  boolean isRunning();
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StatusCounterMonitorThread.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StatusCounterMonitorThread.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StatusCounterMonitorThread.java
index c5413db..d59f0d6 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StatusCounterMonitorThread.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StatusCounterMonitorThread.java
@@ -19,60 +19,61 @@
 package org.apache.streams.local.tasks;
 
 import org.apache.streams.core.DatumStatusCountable;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @Deprecated
 public class StatusCounterMonitorThread implements StatusCounterMonitorRunnable {
-    private static final Logger LOGGER = LoggerFactory.getLogger(StatusCounterMonitorThread.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StatusCounterMonitorThread.class);
 
-    private DatumStatusCountable task;
+  private DatumStatusCountable task;
 
-    private int seconds;
+  private int seconds;
 
-    private boolean run = true;
+  private boolean run = true;
 
-    public StatusCounterMonitorThread(DatumStatusCountable task, int delayInSeconds) {
-        this.task = task;
-        this.seconds = delayInSeconds;
-    }
+  public StatusCounterMonitorThread(DatumStatusCountable task, int delayInSeconds) {
+    this.task = task;
+    this.seconds = delayInSeconds;
+  }
 
-    @Override
-    public void shutdown() {
-        this.run = false;
-    }
+  @Override
+  public void shutdown() {
+    this.run = false;
+  }
 
-    @Override
-    public boolean isRunning() {
-        return this.run;
-    }
+  @Override
+  public boolean isRunning() {
+    return this.run;
+  }
 
-    @Override
-    public void run() {
-        while(run) {
+  @Override
+  public void run() {
+    while(run) {
 
-            /**
-             *
-             * Note:
-             * Quick class and method to let us see what is going on with the JVM. We need to make sure
-             * that everything is running with as little memory as possible. If we are generating a heap
-             * overflow, this will be very apparent by the information shown here.
-             */
+      /**
+       *
+       * Note:
+       * Quick class and method to let us see what is going on with the JVM. We need to make sure
+       * that everything is running with as little memory as possible. If we are generating a heap
+       * overflow, this will be very apparent by the information shown here.
+       */
 
-            LOGGER.debug("{}: {} attempted, {} success, {} partial, {} failed, {} total",
-                    task.getClass(),
-                    task.getDatumStatusCounter().getAttempted(),
-                    task.getDatumStatusCounter().getSuccess(),
-                    task.getDatumStatusCounter().getPartial(),
-                    task.getDatumStatusCounter().getFail(),
-                    task.getDatumStatusCounter().getEmitted());
+      LOGGER.debug("{}: {} attempted, {} success, {} partial, {} failed, {} total",
+          task.getClass(),
+          task.getDatumStatusCounter().getAttempted(),
+          task.getDatumStatusCounter().getSuccess(),
+          task.getDatumStatusCounter().getPartial(),
+          task.getDatumStatusCounter().getFail(),
+          task.getDatumStatusCounter().getEmitted());
 
-            try {
-                Thread.sleep(seconds*1000);
-            }
-            catch (InterruptedException e){
-                shutdown();
-            }
-        }
+      try {
+        Thread.sleep(seconds*1000);
+      }
+      catch (InterruptedException e){
+        shutdown();
+      }
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsMergeTask.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsMergeTask.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsMergeTask.java
index 69cd5a5..473d2f4 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsMergeTask.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsMergeTask.java
@@ -21,9 +21,9 @@ package org.apache.streams.local.tasks;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.local.counters.StreamsTaskCounter;
+
 import org.apache.commons.lang.NotImplementedException;
 
-import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 /**
@@ -34,57 +34,57 @@ import java.util.concurrent.atomic.AtomicBoolean;
 @Deprecated
 public class StreamsMergeTask extends BaseStreamsTask {
 
-    private AtomicBoolean keepRunning;
-    private long sleepTime;
+  private AtomicBoolean keepRunning;
+  private long sleepTime;
 
-    public StreamsMergeTask() {
-        this(null);
-    }
+  public StreamsMergeTask() {
+    this(null);
+  }
 
-    public StreamsMergeTask(StreamsConfiguration streamConfig) {
-        super(streamConfig);
-        this.sleepTime = sleepTime;
-        this.keepRunning = new AtomicBoolean(true);
-    }
+  public StreamsMergeTask(StreamsConfiguration streamConfig) {
+    super(streamConfig);
+    this.sleepTime = sleepTime;
+    this.keepRunning = new AtomicBoolean(true);
+  }
 
-    @Override
-    public void stopTask() {
-        this.keepRunning.set(false);
-    }
+  @Override
+  public void stopTask() {
+    this.keepRunning.set(false);
+  }
 
-    @Override
-    public void setStreamConfig(StreamsConfiguration config) {
+  @Override
+  public void setStreamConfig(StreamsConfiguration config) {
 
-    }
+  }
 
-    @Override
-    public boolean isRunning() {
-        return false;
-    }
+  @Override
+  public boolean isRunning() {
+    return false;
+  }
 
-    @Override
-    public void run() {
-        while(this.keepRunning.get()) {
-            StreamsDatum datum = super.getNextDatum();
-            if(datum != null) {
-                try {
-                    super.addToOutgoingQueue(datum);
-                } catch (InterruptedException ie) {
-                    Thread.currentThread().interrupt();
-                }
-            }
-            else {
-                try {
-                    Thread.sleep(this.sleepTime);
-                } catch (InterruptedException e) {
-                    this.keepRunning.set(false);
-                }
-            }
+  @Override
+  public void run() {
+    while(this.keepRunning.get()) {
+      StreamsDatum datum = super.getNextDatum();
+      if(datum != null) {
+        try {
+          super.addToOutgoingQueue(datum);
+        } catch (InterruptedException ie) {
+          Thread.currentThread().interrupt();
         }
+      }
+      else {
+        try {
+          Thread.sleep(this.sleepTime);
+        } catch (InterruptedException e) {
+          this.keepRunning.set(false);
+        }
+      }
     }
+  }
 
-    @Override
-    public void setStreamsTaskCounter(StreamsTaskCounter counter) {
-        throw new NotImplementedException();
-    }
+  @Override
+  public void setStreamsTaskCounter(StreamsTaskCounter counter) {
+    throw new NotImplementedException();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsPersistWriterTask.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsPersistWriterTask.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsPersistWriterTask.java
index fb97218..5c918b2 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsPersistWriterTask.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsPersistWriterTask.java
@@ -18,15 +18,22 @@
 
 package org.apache.streams.local.tasks;
 
-import com.google.common.util.concurrent.Uninterruptibles;
 import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.core.*;
+import org.apache.streams.core.DatumStatus;
+import org.apache.streams.core.DatumStatusCountable;
+import org.apache.streams.core.DatumStatusCounter;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsPersistWriter;
 import org.apache.streams.core.util.DatumUtils;
 import org.apache.streams.local.counters.StreamsTaskCounter;
+
+import com.google.common.util.concurrent.Uninterruptibles;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.UUID;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -36,136 +43,136 @@ import java.util.concurrent.atomic.AtomicBoolean;
  */
 public class StreamsPersistWriterTask extends BaseStreamsTask implements DatumStatusCountable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsPersistWriterTask.class);
-
-    private StreamsPersistWriter writer;
-    private AtomicBoolean keepRunning;
-    private StreamsConfiguration streamConfig;
-    private BlockingQueue<StreamsDatum> inQueue;
-    private AtomicBoolean isRunning;
-    private AtomicBoolean blocked;
-    private StreamsTaskCounter counter;
-
-    private DatumStatusCounter statusCounter = new DatumStatusCounter();
-
-    @Override
-    public DatumStatusCounter getDatumStatusCounter() {
-        return this.statusCounter;
-    }
-
-
-    /**
-     * Default constructor.  Uses default sleep of 500ms when inbound queue is empty.
-     * @param writer writer to execute in task
-     */
-    public StreamsPersistWriterTask(StreamsPersistWriter writer) {
-        this(writer, null);
-    }
-
-    /**
-     *
-     * @param writer writer to execute in task
-     * @param streamConfig stream config
-     */
-    public StreamsPersistWriterTask(StreamsPersistWriter writer, StreamsConfiguration streamConfig) {
-        super(streamConfig);
-        this.streamConfig = super.streamConfig;
-        this.writer = writer;
-        this.keepRunning = new AtomicBoolean(true);
-        this.isRunning = new AtomicBoolean(true);
-        this.blocked = new AtomicBoolean(false);
-    }
-
-    @Override
-    public boolean isWaiting() {
-        return this.inQueue.isEmpty() && this.blocked.get();
-    }
-
-    @Override
-    public void setStreamConfig(StreamsConfiguration config) {
-        this.streamConfig = config;
-    }
-
-    @Override
-    public void addInputQueue(BlockingQueue<StreamsDatum> inputQueue) {
-        this.inQueue = inputQueue;
-    }
-
-    @Override
-    public boolean isRunning() {
-        return this.isRunning.get();
-    }
-
-    @Override
-    public void run() {
+  private final static Logger LOGGER = LoggerFactory.getLogger(StreamsPersistWriterTask.class);
+
+  private StreamsPersistWriter writer;
+  private AtomicBoolean keepRunning;
+  private StreamsConfiguration streamConfig;
+  private BlockingQueue<StreamsDatum> inQueue;
+  private AtomicBoolean isRunning;
+  private AtomicBoolean blocked;
+  private StreamsTaskCounter counter;
+
+  private DatumStatusCounter statusCounter = new DatumStatusCounter();
+
+  @Override
+  public DatumStatusCounter getDatumStatusCounter() {
+    return this.statusCounter;
+  }
+
+
+  /**
+   * Default constructor.  Uses default sleep of 500ms when inbound queue is empty.
+   * @param writer writer to execute in task
+   */
+  public StreamsPersistWriterTask(StreamsPersistWriter writer) {
+    this(writer, null);
+  }
+
+  /**
+   *
+   * @param writer writer to execute in task
+   * @param streamConfig stream config
+   */
+  public StreamsPersistWriterTask(StreamsPersistWriter writer, StreamsConfiguration streamConfig) {
+    super(streamConfig);
+    this.streamConfig = super.streamConfig;
+    this.writer = writer;
+    this.keepRunning = new AtomicBoolean(true);
+    this.isRunning = new AtomicBoolean(true);
+    this.blocked = new AtomicBoolean(false);
+  }
+
+  @Override
+  public boolean isWaiting() {
+    return this.inQueue.isEmpty() && this.blocked.get();
+  }
+
+  @Override
+  public void setStreamConfig(StreamsConfiguration config) {
+    this.streamConfig = config;
+  }
+
+  @Override
+  public void addInputQueue(BlockingQueue<StreamsDatum> inputQueue) {
+    this.inQueue = inputQueue;
+  }
+
+  @Override
+  public boolean isRunning() {
+    return this.isRunning.get();
+  }
+
+  @Override
+  public void run() {
+    try {
+      this.writer.prepare(this.streamConfig);
+      if(this.counter == null) {
+        this.counter = new StreamsTaskCounter(this.writer.getClass().getName()+ UUID.randomUUID().toString(), getStreamIdentifier(), getStartedAt());
+      }
+      while(this.keepRunning.get()) {
+        StreamsDatum datum = null;
         try {
-            this.writer.prepare(this.streamConfig);
-            if(this.counter == null) {
-                this.counter = new StreamsTaskCounter(this.writer.getClass().getName()+ UUID.randomUUID().toString(), getStreamIdentifier(), getStartedAt());
-            }
-            while(this.keepRunning.get()) {
-                StreamsDatum datum = null;
-                try {
-                    this.blocked.set(true);
-                    datum = this.inQueue.poll(5, TimeUnit.SECONDS);
-                } catch (InterruptedException ie) {
-                    LOGGER.debug("Received InterruptedException. Shutting down and re-applying interrupt status.");
-                    this.keepRunning.set(false);
-                    if(!this.inQueue.isEmpty()) {
-                        LOGGER.error("Received InteruptedException and input queue still has data, count={}, processor={}",this.inQueue.size(), this.writer.getClass().getName());
-                    }
-                    Thread.currentThread().interrupt();
-                } finally {
-                    this.blocked.set(false);
-                }
-                if(datum != null) {
-                    this.counter.incrementReceivedCount();
-                    try {
-                        long startTime = System.currentTimeMillis();
-                        this.writer.write(datum);
-                        this.counter.addTime(System.currentTimeMillis() - startTime);
-                        statusCounter.incrementStatus(DatumStatus.SUCCESS);
-                    } catch (Exception e) {
-                        LOGGER.error("Error writing to persist writer {}", this.writer.getClass().getSimpleName(), e);
-                        this.keepRunning.set(false); // why do we shutdown on a failed write ?
-                        statusCounter.incrementStatus(DatumStatus.FAIL);
-                        DatumUtils.addErrorToMetadata(datum, e, this.writer.getClass());
-                        this.counter.incrementErrorCount();
-                    }
-                } else { //datums should never be null
-                    LOGGER.trace("Received null StreamsDatum @ writer : {}", this.writer.getClass().getName());
-                }
-            }
-            Uninterruptibles.sleepUninterruptibly(streamConfig.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-        } catch(Throwable e) {
-            LOGGER.error("Caught Throwable in Persist Writer {} : {}", this.writer.getClass().getSimpleName(), e);
+          this.blocked.set(true);
+          datum = this.inQueue.poll(5, TimeUnit.SECONDS);
+        } catch (InterruptedException ie) {
+          LOGGER.debug("Received InterruptedException. Shutting down and re-applying interrupt status.");
+          this.keepRunning.set(false);
+          if(!this.inQueue.isEmpty()) {
+            LOGGER.error("Received InteruptedException and input queue still has data, count={}, processor={}",this.inQueue.size(), this.writer.getClass().getName());
+          }
+          Thread.currentThread().interrupt();
         } finally {
-            Uninterruptibles.sleepUninterruptibly(streamConfig.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            this.writer.cleanUp();
-            this.isRunning.set(false);
+          this.blocked.set(false);
         }
+        if(datum != null) {
+          this.counter.incrementReceivedCount();
+          try {
+            long startTime = System.currentTimeMillis();
+            this.writer.write(datum);
+            this.counter.addTime(System.currentTimeMillis() - startTime);
+            statusCounter.incrementStatus(DatumStatus.SUCCESS);
+          } catch (Exception e) {
+            LOGGER.error("Error writing to persist writer {}", this.writer.getClass().getSimpleName(), e);
+            this.keepRunning.set(false); // why do we shutdown on a failed write ?
+            statusCounter.incrementStatus(DatumStatus.FAIL);
+            DatumUtils.addErrorToMetadata(datum, e, this.writer.getClass());
+            this.counter.incrementErrorCount();
+          }
+        } else { //datums should never be null
+          LOGGER.trace("Received null StreamsDatum @ writer : {}", this.writer.getClass().getName());
+        }
+      }
+      Uninterruptibles.sleepUninterruptibly(streamConfig.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+    } catch(Throwable e) {
+      LOGGER.error("Caught Throwable in Persist Writer {} : {}", this.writer.getClass().getSimpleName(), e);
+    } finally {
+      Uninterruptibles.sleepUninterruptibly(streamConfig.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      this.writer.cleanUp();
+      this.isRunning.set(false);
     }
-
-    @Override
-    public void stopTask() {
-        this.keepRunning.set(false);
-    }
-
-
-    @Override
-    public void addOutputQueue(BlockingQueue<StreamsDatum> outputQueue) {
-        throw new UnsupportedOperationException(this.getClass().getName()+" does not support method - setOutputQueue()");
-    }
-
-    @Override
-    public List<BlockingQueue<StreamsDatum>> getInputQueues() {
-        List<BlockingQueue<StreamsDatum>> queues = new LinkedList<>();
-        queues.add(this.inQueue);
-        return queues;
-    }
-
-    @Override
-    public void setStreamsTaskCounter(StreamsTaskCounter counter) {
-        this.counter = counter;
-    }
+  }
+
+  @Override
+  public void stopTask() {
+    this.keepRunning.set(false);
+  }
+
+
+  @Override
+  public void addOutputQueue(BlockingQueue<StreamsDatum> outputQueue) {
+    throw new UnsupportedOperationException(this.getClass().getName()+" does not support method - setOutputQueue()");
+  }
+
+  @Override
+  public List<BlockingQueue<StreamsDatum>> getInputQueues() {
+    List<BlockingQueue<StreamsDatum>> queues = new LinkedList<>();
+    queues.add(this.inQueue);
+    return queues;
+  }
+
+  @Override
+  public void setStreamsTaskCounter(StreamsTaskCounter counter) {
+    this.counter = counter;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsProcessorTask.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsProcessorTask.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsProcessorTask.java
index 137c7e1..8720c68 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsProcessorTask.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsProcessorTask.java
@@ -18,15 +18,21 @@
 
 package org.apache.streams.local.tasks;
 
-import com.google.common.collect.Maps;
 import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.core.*;
+import org.apache.streams.core.DatumStatus;
+import org.apache.streams.core.DatumStatusCountable;
+import org.apache.streams.core.DatumStatusCounter;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.core.util.DatumUtils;
 import org.apache.streams.local.counters.StreamsTaskCounter;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.UUID;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -36,139 +42,139 @@ import java.util.concurrent.atomic.AtomicBoolean;
  */
 public class StreamsProcessorTask extends BaseStreamsTask implements DatumStatusCountable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsProcessorTask.class);
-
-
-    private StreamsProcessor processor;
-    private AtomicBoolean keepRunning;
-    private StreamsConfiguration streamConfig;
-    private BlockingQueue<StreamsDatum> inQueue;
-    private AtomicBoolean isRunning;
-    private AtomicBoolean blocked;
-    private StreamsTaskCounter counter;
-
-    private DatumStatusCounter statusCounter = new DatumStatusCounter();
-
-    @Override
-    public DatumStatusCounter getDatumStatusCounter() {
-        return this.statusCounter;
-    }
-
-    /**
-     * Default constructor, uses default sleep time of 500ms when inbound queue is empty
-     * @param processor process to run in task
-     */
-    public StreamsProcessorTask(StreamsProcessor processor) {
-        this(processor, new StreamsConfiguration());
-    }
-
-    /**
-     * @param processor
-     * @param streamConfig
-     */
-    public StreamsProcessorTask(StreamsProcessor processor, StreamsConfiguration streamConfig) {
-        super(streamConfig);
-        this.streamConfig = super.streamConfig;
-        this.processor = processor;
-        this.keepRunning = new AtomicBoolean(true);
-        this.isRunning = new AtomicBoolean(true);
-        this.blocked = new AtomicBoolean(true);
-    }
-
-    @Override
-    public boolean isWaiting() {
-        return this.inQueue.isEmpty() && this.blocked.get();
-    }
-
-    @Override
-    public void stopTask() {
-        this.keepRunning.set(false);
-    }
-
-    @Override
-    public void setStreamConfig(StreamsConfiguration config) {
-        this.streamConfig = config;
-    }
-
-    @Override
-    public void addInputQueue(BlockingQueue<StreamsDatum> inputQueue) {
-        this.inQueue = inputQueue;
-    }
-
-    @Override
-    public boolean isRunning() {
-        return this.isRunning.get();
-    }
-
-    @Override
-    public void run() {
+  private final static Logger LOGGER = LoggerFactory.getLogger(StreamsProcessorTask.class);
+
+
+  private StreamsProcessor processor;
+  private AtomicBoolean keepRunning;
+  private StreamsConfiguration streamConfig;
+  private BlockingQueue<StreamsDatum> inQueue;
+  private AtomicBoolean isRunning;
+  private AtomicBoolean blocked;
+  private StreamsTaskCounter counter;
+
+  private DatumStatusCounter statusCounter = new DatumStatusCounter();
+
+  @Override
+  public DatumStatusCounter getDatumStatusCounter() {
+    return this.statusCounter;
+  }
+
+  /**
+   * Default constructor, uses default sleep time of 500ms when inbound queue is empty
+   * @param processor process to run in task
+   */
+  public StreamsProcessorTask(StreamsProcessor processor) {
+    this(processor, new StreamsConfiguration());
+  }
+
+  /**
+   * @param processor
+   * @param streamConfig
+   */
+  public StreamsProcessorTask(StreamsProcessor processor, StreamsConfiguration streamConfig) {
+    super(streamConfig);
+    this.streamConfig = super.streamConfig;
+    this.processor = processor;
+    this.keepRunning = new AtomicBoolean(true);
+    this.isRunning = new AtomicBoolean(true);
+    this.blocked = new AtomicBoolean(true);
+  }
+
+  @Override
+  public boolean isWaiting() {
+    return this.inQueue.isEmpty() && this.blocked.get();
+  }
+
+  @Override
+  public void stopTask() {
+    this.keepRunning.set(false);
+  }
+
+  @Override
+  public void setStreamConfig(StreamsConfiguration config) {
+    this.streamConfig = config;
+  }
+
+  @Override
+  public void addInputQueue(BlockingQueue<StreamsDatum> inputQueue) {
+    this.inQueue = inputQueue;
+  }
+
+  @Override
+  public boolean isRunning() {
+    return this.isRunning.get();
+  }
+
+  @Override
+  public void run() {
+    try {
+      this.processor.prepare(this.streamConfig);
+      if(this.counter == null) {
+        this.counter = new StreamsTaskCounter(this.processor.getClass().getName()+ UUID.randomUUID().toString(), getStreamIdentifier(), getStartedAt());
+      }
+      while(this.keepRunning.get()) {
+        StreamsDatum datum = null;
         try {
-            this.processor.prepare(this.streamConfig);
-            if(this.counter == null) {
-                this.counter = new StreamsTaskCounter(this.processor.getClass().getName()+ UUID.randomUUID().toString(), getStreamIdentifier(), getStartedAt());
-            }
-            while(this.keepRunning.get()) {
-                StreamsDatum datum = null;
-                try {
-                    this.blocked.set(true);
-                    datum = this.inQueue.poll(streamConfig.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-                } catch (InterruptedException ie) {
-                    LOGGER.debug("Received InteruptedException, shutting down and re-applying interrupt status.");
-                    this.keepRunning.set(false);
-                    if(!this.inQueue.isEmpty()) {
-                        LOGGER.error("Received InteruptedException and input queue still has data, count={}, processor={}",this.inQueue.size(), this.processor.getClass().getName());
-                    }
-                    Thread.currentThread().interrupt();
-                } finally {
-                    this.blocked.set(false);
-                }
-                if(datum != null) {
-                    this.counter.incrementReceivedCount();
-                    try {
-                        long startTime = System.currentTimeMillis();
-                        List<StreamsDatum> output = this.processor.process(datum);
-                        this.counter.addTime(System.currentTimeMillis() - startTime);
-                        if(output != null) {
-                            for(StreamsDatum outDatum : output) {
-                                super.addToOutgoingQueue(outDatum);
-                                this.counter.incrementEmittedCount();
-                                statusCounter.incrementStatus(DatumStatus.SUCCESS);
-                            }
-                        }
-                    } catch (InterruptedException ie) {
-                        LOGGER.warn("Received InterruptedException, shutting down and re-applying interrupt status.");
-                        this.keepRunning.set(false);
-                        Thread.currentThread().interrupt();
-                    } catch (Throwable t) {
-                        this.counter.incrementErrorCount();
-                        LOGGER.warn("Caught Throwable in processor, {} : {}", this.processor.getClass().getName(), t);
-                        statusCounter.incrementStatus(DatumStatus.FAIL);
-                        //Add the error to the metadata, but keep processing
-                        DatumUtils.addErrorToMetadata(datum, t, this.processor.getClass());
-                    }
-                } else {
-                    LOGGER.trace("Removed NULL datum from queue at processor : {}", this.processor.getClass().getName());
-                }
-            }
-        } catch(Throwable e) {
-            LOGGER.error("Caught Throwable in Processor {}", this.processor.getClass().getSimpleName(), e);
+          this.blocked.set(true);
+          datum = this.inQueue.poll(streamConfig.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+        } catch (InterruptedException ie) {
+          LOGGER.debug("Received InteruptedException, shutting down and re-applying interrupt status.");
+          this.keepRunning.set(false);
+          if(!this.inQueue.isEmpty()) {
+            LOGGER.error("Received InteruptedException and input queue still has data, count={}, processor={}",this.inQueue.size(), this.processor.getClass().getName());
+          }
+          Thread.currentThread().interrupt();
         } finally {
-            this.isRunning.set(false);
-            this.processor.cleanUp();
+          this.blocked.set(false);
         }
+        if(datum != null) {
+          this.counter.incrementReceivedCount();
+          try {
+            long startTime = System.currentTimeMillis();
+            List<StreamsDatum> output = this.processor.process(datum);
+            this.counter.addTime(System.currentTimeMillis() - startTime);
+            if(output != null) {
+              for(StreamsDatum outDatum : output) {
+                super.addToOutgoingQueue(outDatum);
+                this.counter.incrementEmittedCount();
+                statusCounter.incrementStatus(DatumStatus.SUCCESS);
+              }
+            }
+          } catch (InterruptedException ie) {
+            LOGGER.warn("Received InterruptedException, shutting down and re-applying interrupt status.");
+            this.keepRunning.set(false);
+            Thread.currentThread().interrupt();
+          } catch (Throwable t) {
+            this.counter.incrementErrorCount();
+            LOGGER.warn("Caught Throwable in processor, {} : {}", this.processor.getClass().getName(), t);
+            statusCounter.incrementStatus(DatumStatus.FAIL);
+            //Add the error to the metadata, but keep processing
+            DatumUtils.addErrorToMetadata(datum, t, this.processor.getClass());
+          }
+        } else {
+          LOGGER.trace("Removed NULL datum from queue at processor : {}", this.processor.getClass().getName());
+        }
+      }
+    } catch(Throwable e) {
+      LOGGER.error("Caught Throwable in Processor {}", this.processor.getClass().getSimpleName(), e);
+    } finally {
+      this.isRunning.set(false);
+      this.processor.cleanUp();
     }
-
-    @Override
-    public List<BlockingQueue<StreamsDatum>> getInputQueues() {
-        List<BlockingQueue<StreamsDatum>> queues = new LinkedList<BlockingQueue<StreamsDatum>>();
-        queues.add(this.inQueue);
-        return queues;
-    }
-
-    @Override
-    public void setStreamsTaskCounter(StreamsTaskCounter counter) {
-        this.counter = counter;
-    }
+  }
+
+  @Override
+  public List<BlockingQueue<StreamsDatum>> getInputQueues() {
+    List<BlockingQueue<StreamsDatum>> queues = new LinkedList<BlockingQueue<StreamsDatum>>();
+    queues.add(this.inQueue);
+    return queues;
+  }
+
+  @Override
+  public void setStreamsTaskCounter(StreamsTaskCounter counter) {
+    this.counter = counter;
+  }
 
 
 }


[36/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchPersistUpdaterIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchPersistUpdaterIT.java b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchPersistUpdaterIT.java
index d34f53f..e356aff 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchPersistUpdaterIT.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchPersistUpdaterIT.java
@@ -18,27 +18,24 @@
 
 package org.apache.streams.elasticsearch.test;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigParseOptions;
-import org.apache.commons.io.Charsets;
-import org.apache.commons.io.IOUtils;
 import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.elasticsearch.ElasticsearchClientManager;
 import org.apache.streams.elasticsearch.ElasticsearchPersistUpdater;
-import org.apache.streams.elasticsearch.ElasticsearchPersistWriter;
 import org.apache.streams.elasticsearch.ElasticsearchWriterConfiguration;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+
+import org.apache.commons.io.Charsets;
+import org.apache.commons.io.IOUtils;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
-import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
-import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
 import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
 import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
 import org.elasticsearch.action.search.SearchRequestBuilder;
@@ -53,151 +50,150 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.InputStream;
 import java.util.List;
-import java.util.Properties;
 
 import static junit.framework.TestCase.assertTrue;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 
 /**
- * Created by sblackmon on 10/20/14.
+ * Integration Test for
+ * @see org.apache.streams.elasticsearch.ElasticsearchPersistUpdater
  */
 public class ElasticsearchPersistUpdaterIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistUpdaterIT.class);
-
-    private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
-
-    protected ElasticsearchWriterConfiguration testConfiguration;
-    protected Client testClient;
-
-    @Before
-    public void prepareTest() throws Exception {
-
-        Config reference  = ConfigFactory.load();
-        File conf_file = new File("target/test-classes/ElasticsearchPersistUpdaterIT.conf");
-        assert(conf_file.exists());
-        Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
-        testConfiguration = new ComponentConfigurator<>(ElasticsearchWriterConfiguration.class).detectConfiguration(typesafe, "elasticsearch");
-        testClient = new ElasticsearchClientManager(testConfiguration).getClient();
-
-        ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
-        ClusterHealthResponse clusterHealthResponse = testClient.admin().cluster().health(clusterHealthRequest).actionGet();
-        assertNotEquals(clusterHealthResponse.getStatus(), ClusterHealthStatus.RED);
-
-        IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getIndex());
-        IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
-        assertTrue(indicesExistsResponse.isExists());
-
+  private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistUpdaterIT.class);
+
+  private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  protected ElasticsearchWriterConfiguration testConfiguration;
+  protected Client testClient;
+
+  @Before
+  public void prepareTest() throws Exception {
+
+    Config reference  = ConfigFactory.load();
+    File conf_file = new File("target/test-classes/ElasticsearchPersistUpdaterIT.conf");
+    assert(conf_file.exists());
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+    testConfiguration = new ComponentConfigurator<>(ElasticsearchWriterConfiguration.class).detectConfiguration(typesafe, "elasticsearch");
+    testClient = new ElasticsearchClientManager(testConfiguration).getClient();
+
+    ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
+    ClusterHealthResponse clusterHealthResponse = testClient.admin().cluster().health(clusterHealthRequest).actionGet();
+    assertNotEquals(clusterHealthResponse.getStatus(), ClusterHealthStatus.RED);
+
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getIndex());
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    assertTrue(indicesExistsResponse.isExists());
+
+  }
+
+  @Test
+  public void testPersistUpdater() throws Exception {
+
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getIndex());
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    assertTrue(indicesExistsResponse.isExists());
+
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getIndex())
+        .setTypes(testConfiguration.getType());
+    SearchResponse countResponse = countRequest.execute().actionGet();
+
+    long count = countResponse.getHits().getTotalHits();
+
+    ElasticsearchPersistUpdater testPersistUpdater = new ElasticsearchPersistUpdater(testConfiguration);
+    testPersistUpdater.prepare(null);
+
+    InputStream testActivityFolderStream = ElasticsearchPersistUpdaterIT.class.getClassLoader()
+        .getResourceAsStream("activities");
+    List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
+
+    for( String file : files) {
+      LOGGER.info("File: " + file );
+      InputStream testActivityFileStream = ElasticsearchPersistUpdaterIT.class.getClassLoader()
+          .getResourceAsStream("activities/" + file);
+      Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
+      Activity update = new Activity();
+      update.setAdditionalProperty("updated", Boolean.TRUE);
+      update.setAdditionalProperty("str", "str");
+      update.setAdditionalProperty("long", 10l);
+      update.setActor(
+          new ActivityObject()
+              .withAdditionalProperty("updated", Boolean.TRUE)
+              .withAdditionalProperty("double", 10d)
+              .withAdditionalProperty("map",
+                  MAPPER.createObjectNode().set("field", MAPPER.createArrayNode().add("item"))));
+
+      StreamsDatum datum = new StreamsDatum(update, activity.getVerb());
+      testPersistUpdater.write( datum );
+      LOGGER.info("Updated: " + activity.getVerb() );
     }
 
-    @Test
-    public void testPersistUpdater() throws Exception {
-
-        IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getIndex());
-        IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
-        assertTrue(indicesExistsResponse.isExists());
-
-        SearchRequestBuilder countRequest = testClient
-                .prepareSearch(testConfiguration.getIndex())
-                .setTypes(testConfiguration.getType());
-        SearchResponse countResponse = countRequest.execute().actionGet();
+    testPersistUpdater.cleanUp();
 
-        long count = countResponse.getHits().getTotalHits();
+    SearchRequestBuilder updatedCountRequest = testClient
+        .prepareSearch(testConfiguration.getIndex())
+        .setTypes(testConfiguration.getType())
+        .setQuery(QueryBuilders.existsQuery("updated"));
+    SearchResponse updatedCount = updatedCountRequest.execute().actionGet();
 
-        ElasticsearchPersistUpdater testPersistUpdater = new ElasticsearchPersistUpdater(testConfiguration);
-        testPersistUpdater.prepare(null);
+    LOGGER.info("updated: {}", updatedCount.getHits().getTotalHits());
 
-        InputStream testActivityFolderStream = ElasticsearchPersistUpdaterIT.class.getClassLoader()
-                .getResourceAsStream("activities");
-        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
+    assertEquals(count, updatedCount.getHits().getTotalHits());
 
-        for( String file : files) {
-            LOGGER.info("File: " + file );
-            InputStream testActivityFileStream = ElasticsearchPersistUpdaterIT.class.getClassLoader()
-                    .getResourceAsStream("activities/" + file);
-            Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
-            Activity update = new Activity();
-            update.setAdditionalProperty("updated", Boolean.TRUE);
-            update.setAdditionalProperty("str", "str");
-            update.setAdditionalProperty("long", 10l);
-            update.setActor(
-                    new ActivityObject()
-                    .withAdditionalProperty("updated", Boolean.TRUE)
-                    .withAdditionalProperty("double", 10d)
-                    .withAdditionalProperty("map",
-                            MAPPER.createObjectNode().set("field", MAPPER.createArrayNode().add("item"))));
+    SearchRequestBuilder actorUpdatedCountRequest = testClient
+        .prepareSearch(testConfiguration.getIndex())
+        .setTypes(testConfiguration.getType())
+        .setQuery(QueryBuilders.termQuery("actor.updated", true));
+    SearchResponse actorUpdatedCount = actorUpdatedCountRequest.execute().actionGet();
 
-            StreamsDatum datum = new StreamsDatum(update, activity.getVerb());
-            testPersistUpdater.write( datum );
-            LOGGER.info("Updated: " + activity.getVerb() );
-        }
+    LOGGER.info("actor.updated: {}", actorUpdatedCount.getHits().getTotalHits());
 
-        testPersistUpdater.cleanUp();
+    assertEquals(count, actorUpdatedCount.getHits().getTotalHits());
 
-        SearchRequestBuilder updatedCountRequest = testClient
-                .prepareSearch(testConfiguration.getIndex())
-                .setTypes(testConfiguration.getType())
-                .setQuery(QueryBuilders.existsQuery("updated"));
-        SearchResponse updatedCount = updatedCountRequest.execute().actionGet();
+    SearchRequestBuilder strUpdatedCountRequest = testClient
+        .prepareSearch(testConfiguration.getIndex())
+        .setTypes(testConfiguration.getType())
+        .setQuery(QueryBuilders.termQuery("str", "str"));
+    SearchResponse strUpdatedCount = strUpdatedCountRequest.execute().actionGet();
 
-        LOGGER.info("updated: {}", updatedCount.getHits().getTotalHits());
+    LOGGER.info("strupdated: {}", strUpdatedCount.getHits().getTotalHits());
 
-        assertEquals(count, updatedCount.getHits().getTotalHits());
+    assertEquals(count, strUpdatedCount.getHits().getTotalHits());
 
-        SearchRequestBuilder actorUpdatedCountRequest = testClient
-                .prepareSearch(testConfiguration.getIndex())
-                .setTypes(testConfiguration.getType())
-                .setQuery(QueryBuilders.termQuery("actor.updated", true));
-        SearchResponse actorUpdatedCount = actorUpdatedCountRequest.execute().actionGet();
+    SearchRequestBuilder longUpdatedCountRequest = testClient
+        .prepareSearch(testConfiguration.getIndex())
+        .setTypes(testConfiguration.getType())
+        .setQuery(QueryBuilders.rangeQuery("long").from(9).to(11));
+    SearchResponse longUpdatedCount = longUpdatedCountRequest.execute().actionGet();
 
-        LOGGER.info("actor.updated: {}", actorUpdatedCount.getHits().getTotalHits());
+    LOGGER.info("longupdated: {}", longUpdatedCount.getHits().getTotalHits());
 
-        assertEquals(count, actorUpdatedCount.getHits().getTotalHits());
+    assertEquals(count, longUpdatedCount.getHits().getTotalHits());
 
-        SearchRequestBuilder strUpdatedCountRequest = testClient
-                .prepareSearch(testConfiguration.getIndex())
-                .setTypes(testConfiguration.getType())
-                .setQuery(QueryBuilders.termQuery("str", "str"));
-        SearchResponse strUpdatedCount = strUpdatedCountRequest.execute().actionGet();
+    SearchRequestBuilder doubleUpdatedCountRequest = testClient
+        .prepareSearch(testConfiguration.getIndex())
+        .setTypes(testConfiguration.getType())
+        .setQuery(QueryBuilders.rangeQuery("long").from(9).to(11));
+    SearchResponse doubleUpdatedCount = doubleUpdatedCountRequest.execute().actionGet();
 
-        LOGGER.info("strupdated: {}", strUpdatedCount.getHits().getTotalHits());
+    LOGGER.info("doubleupdated: {}", doubleUpdatedCount.getHits().getTotalHits());
 
-        assertEquals(count, strUpdatedCount.getHits().getTotalHits());
+    assertEquals(count, doubleUpdatedCount.getHits().getTotalHits());
 
-        SearchRequestBuilder longUpdatedCountRequest = testClient
-                .prepareSearch(testConfiguration.getIndex())
-                .setTypes(testConfiguration.getType())
-                .setQuery(QueryBuilders.rangeQuery("long").from(9).to(11));
-        SearchResponse longUpdatedCount = longUpdatedCountRequest.execute().actionGet();
+    SearchRequestBuilder mapUpdatedCountRequest = testClient
+        .prepareSearch(testConfiguration.getIndex())
+        .setTypes(testConfiguration.getType())
+        .setQuery(QueryBuilders.termQuery("actor.map.field", "item"));
+    SearchResponse mapUpdatedCount = mapUpdatedCountRequest.execute().actionGet();
 
-        LOGGER.info("longupdated: {}", longUpdatedCount.getHits().getTotalHits());
+    LOGGER.info("mapfieldupdated: {}", mapUpdatedCount.getHits().getTotalHits());
 
-        assertEquals(count, longUpdatedCount.getHits().getTotalHits());
+    assertEquals(count, mapUpdatedCount.getHits().getTotalHits());
 
-        SearchRequestBuilder doubleUpdatedCountRequest = testClient
-                .prepareSearch(testConfiguration.getIndex())
-                .setTypes(testConfiguration.getType())
-                .setQuery(QueryBuilders.rangeQuery("long").from(9).to(11));
-        SearchResponse doubleUpdatedCount = doubleUpdatedCountRequest.execute().actionGet();
-
-        LOGGER.info("doubleupdated: {}", doubleUpdatedCount.getHits().getTotalHits());
-
-        assertEquals(count, doubleUpdatedCount.getHits().getTotalHits());
-
-        SearchRequestBuilder mapUpdatedCountRequest = testClient
-                .prepareSearch(testConfiguration.getIndex())
-                .setTypes(testConfiguration.getType())
-                .setQuery(QueryBuilders.termQuery("actor.map.field", "item"));
-        SearchResponse mapUpdatedCount = mapUpdatedCountRequest.execute().actionGet();
-
-        LOGGER.info("mapfieldupdated: {}", mapUpdatedCount.getHits().getTotalHits());
-
-        assertEquals(count, mapUpdatedCount.getHits().getTotalHits());
-
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchPersistWriterIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchPersistWriterIT.java b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchPersistWriterIT.java
index f291dcd..f290971 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchPersistWriterIT.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchPersistWriterIT.java
@@ -18,21 +18,21 @@
 
 package org.apache.streams.elasticsearch.test;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigParseOptions;
-import org.apache.commons.io.Charsets;
-import org.apache.commons.io.IOUtils;
 import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.elasticsearch.ElasticsearchClientManager;
 import org.apache.streams.elasticsearch.ElasticsearchPersistWriter;
 import org.apache.streams.elasticsearch.ElasticsearchWriterConfiguration;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+
+import org.apache.commons.io.Charsets;
+import org.apache.commons.io.IOUtils;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
@@ -58,71 +58,72 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 
 /**
- * Created by sblackmon on 10/20/14.
+ * Integration Test for
+ * @see org.apache.streams.elasticsearch.ElasticsearchPersistWriter
  */
 public class ElasticsearchPersistWriterIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistWriterIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchPersistWriterIT.class);
 
-    private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+  private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
 
-    protected ElasticsearchWriterConfiguration testConfiguration;
-    protected Client testClient;
+  protected ElasticsearchWriterConfiguration testConfiguration;
+  protected Client testClient;
 
-    @Before
-    public void prepareTest() throws Exception {
+  @Before
+  public void prepareTest() throws Exception {
 
-        Config reference  = ConfigFactory.load();
-        File conf_file = new File("target/test-classes/ElasticsearchPersistWriterIT.conf");
-        assert(conf_file.exists());
-        Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
-        testConfiguration = new ComponentConfigurator<>(ElasticsearchWriterConfiguration.class).detectConfiguration(typesafe, "elasticsearch");
-        testClient = new ElasticsearchClientManager(testConfiguration).getClient();
+    Config reference  = ConfigFactory.load();
+    File conf_file = new File("target/test-classes/ElasticsearchPersistWriterIT.conf");
+    assert(conf_file.exists());
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+    testConfiguration = new ComponentConfigurator<>(ElasticsearchWriterConfiguration.class).detectConfiguration(typesafe, "elasticsearch");
+    testClient = new ElasticsearchClientManager(testConfiguration).getClient();
 
-        ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
-        ClusterHealthResponse clusterHealthResponse = testClient.admin().cluster().health(clusterHealthRequest).actionGet();
-        assertNotEquals(clusterHealthResponse.getStatus(), ClusterHealthStatus.RED);
+    ClusterHealthRequest clusterHealthRequest = Requests.clusterHealthRequest();
+    ClusterHealthResponse clusterHealthResponse = testClient.admin().cluster().health(clusterHealthRequest).actionGet();
+    assertNotEquals(clusterHealthResponse.getStatus(), ClusterHealthStatus.RED);
 
-        IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getIndex());
-        IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
-        if(indicesExistsResponse.isExists()) {
-            DeleteIndexRequest deleteIndexRequest = Requests.deleteIndexRequest(testConfiguration.getIndex());
-            DeleteIndexResponse deleteIndexResponse = testClient.admin().indices().delete(deleteIndexRequest).actionGet();
-            assertTrue(deleteIndexResponse.isAcknowledged());
-        };
+    IndicesExistsRequest indicesExistsRequest = Requests.indicesExistsRequest(testConfiguration.getIndex());
+    IndicesExistsResponse indicesExistsResponse = testClient.admin().indices().exists(indicesExistsRequest).actionGet();
+    if(indicesExistsResponse.isExists()) {
+      DeleteIndexRequest deleteIndexRequest = Requests.deleteIndexRequest(testConfiguration.getIndex());
+      DeleteIndexResponse deleteIndexResponse = testClient.admin().indices().delete(deleteIndexRequest).actionGet();
+      assertTrue(deleteIndexResponse.isAcknowledged());
+    };
 
-    }
+  }
 
-    @Test
-    public void testPersistWriter() throws Exception {
+  @Test
+  public void testPersistWriter() throws Exception {
 
-        ElasticsearchPersistWriter testPersistWriter = new ElasticsearchPersistWriter(testConfiguration);
-        testPersistWriter.prepare(null);
+    ElasticsearchPersistWriter testPersistWriter = new ElasticsearchPersistWriter(testConfiguration);
+    testPersistWriter.prepare(null);
 
-        InputStream testActivityFolderStream = ElasticsearchPersistWriterIT.class.getClassLoader()
-               .getResourceAsStream("activities");
-        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
+    InputStream testActivityFolderStream = ElasticsearchPersistWriterIT.class.getClassLoader()
+        .getResourceAsStream("activities");
+    List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
 
-        for( String file : files) {
-           LOGGER.info("File: " + file );
-           InputStream testActivityFileStream = ElasticsearchPersistWriterIT.class.getClassLoader()
-                   .getResourceAsStream("activities/" + file);
-           Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
-           StreamsDatum datum = new StreamsDatum(activity, activity.getVerb());
-           testPersistWriter.write( datum );
-           LOGGER.info("Wrote: " + activity.getVerb() );
-        }
+    for( String file : files) {
+      LOGGER.info("File: " + file );
+      InputStream testActivityFileStream = ElasticsearchPersistWriterIT.class.getClassLoader()
+          .getResourceAsStream("activities/" + file);
+      Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
+      StreamsDatum datum = new StreamsDatum(activity, activity.getVerb());
+      testPersistWriter.write( datum );
+      LOGGER.info("Wrote: " + activity.getVerb() );
+    }
 
-        testPersistWriter.cleanUp();
+    testPersistWriter.cleanUp();
 
-        SearchRequestBuilder countRequest = testClient
-                .prepareSearch(testConfiguration.getIndex())
-                .setTypes(testConfiguration.getType());
-        SearchResponse countResponse = countRequest.execute().actionGet();
+    SearchRequestBuilder countRequest = testClient
+        .prepareSearch(testConfiguration.getIndex())
+        .setTypes(testConfiguration.getType());
+    SearchResponse countResponse = countRequest.execute().actionGet();
 
-        assertEquals(89, countResponse.getHits().getTotalHits());
+    assertEquals(89, countResponse.getHits().getTotalHits());
 
-    }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/TestMetadataFromDocumentProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/TestMetadataFromDocumentProcessor.java b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/TestMetadataFromDocumentProcessor.java
index ab45cf3..76f10b1 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/TestMetadataFromDocumentProcessor.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/TestMetadataFromDocumentProcessor.java
@@ -18,25 +18,20 @@
 
 package org.apache.streams.elasticsearch.test;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Sets;
-import org.apache.commons.io.Charsets;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.SerializationUtils;
 import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.elasticsearch.processor.DocumentToMetadataProcessor;
 import org.apache.streams.elasticsearch.processor.MetadataFromDocumentProcessor;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
-import org.junit.Assert;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Sets;
+
+import org.apache.commons.io.Charsets;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.SerializationUtils;
 import org.junit.Before;
 import org.junit.Test;
-import org.reflections.Reflections;
-import org.reflections.scanners.SubTypesScanner;
-import org.reflections.util.ClasspathHelper;
-import org.reflections.util.ConfigurationBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -45,88 +40,89 @@ import java.util.List;
 import java.util.Set;
 
 /**
- * Created by sblackmon on 10/20/14.
+ * Unit Test for
+ * @see org.apache.streams.elasticsearch.processor.MetadataFromDocumentProcessor
  */
 public class TestMetadataFromDocumentProcessor {
 
-    private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+  private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(TestMetadataFromDocumentProcessor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TestMetadataFromDocumentProcessor.class);
 
-    @Before
-    public void prepareTest() {
+  @Before
+  public void prepareTest() {
 
-    }
+  }
 
-    @Test
-    public void testSerializability() {
-        MetadataFromDocumentProcessor processor = new MetadataFromDocumentProcessor();
+  @Test
+  public void testSerializability() {
+    MetadataFromDocumentProcessor processor = new MetadataFromDocumentProcessor();
 
-        MetadataFromDocumentProcessor clone = (MetadataFromDocumentProcessor) SerializationUtils.clone(processor);
-    }
+    MetadataFromDocumentProcessor clone = (MetadataFromDocumentProcessor) SerializationUtils.clone(processor);
+  }
 
-    @Test
-    public void testMetadataFromDocumentProcessor() throws Exception {
+  @Test
+  public void testMetadataFromDocumentProcessor() throws Exception {
 
-        MetadataFromDocumentProcessor processor = new MetadataFromDocumentProcessor();
+    MetadataFromDocumentProcessor processor = new MetadataFromDocumentProcessor();
 
-        processor.prepare(null);
+    processor.prepare(null);
 
-        InputStream testActivityFolderStream = TestMetadataFromDocumentProcessor.class.getClassLoader()
-                .getResourceAsStream("activities");
-        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
+    InputStream testActivityFolderStream = TestMetadataFromDocumentProcessor.class.getClassLoader()
+        .getResourceAsStream("activities");
+    List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
 
-        Set<ActivityObject> objects = Sets.newHashSet();
+    Set<ActivityObject> objects = Sets.newHashSet();
 
-        for( String file : files) {
-            LOGGER.info("File: " + file );
-            InputStream testActivityFileStream = TestMetadataFromDocumentProcessor.class.getClassLoader()
-                    .getResourceAsStream("activities/" + file);
-            Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
-            activity.setId(activity.getVerb());
-            activity.getAdditionalProperties().remove("$license");
+    for( String file : files) {
+      LOGGER.info("File: " + file );
+      InputStream testActivityFileStream = TestMetadataFromDocumentProcessor.class.getClassLoader()
+          .getResourceAsStream("activities/" + file);
+      Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
+      activity.setId(activity.getVerb());
+      activity.getAdditionalProperties().remove("$license");
 
-            if( activity.getActor().getObjectType() != null)
-                objects.add(activity.getActor());
-            if( activity.getObject().getObjectType() != null)
-                objects.add(activity.getObject());
+      if( activity.getActor().getObjectType() != null)
+        objects.add(activity.getActor());
+      if( activity.getObject().getObjectType() != null)
+        objects.add(activity.getObject());
 
-            StreamsDatum datum = new StreamsDatum(activity);
+      StreamsDatum datum = new StreamsDatum(activity);
 
-            List<StreamsDatum> resultList = processor.process(datum);
-            assert(resultList != null);
-            assert(resultList.size() == 1);
+      List<StreamsDatum> resultList = processor.process(datum);
+      assert(resultList != null);
+      assert(resultList.size() == 1);
 
-            StreamsDatum result = resultList.get(0);
-            assert(result != null);
-            assert(result.getDocument() != null);
-            assert(result.getId() != null);
-            assert(result.getMetadata() != null);
-            assert(result.getMetadata().get("id") != null);
-            assert(result.getMetadata().get("type") != null);
+      StreamsDatum result = resultList.get(0);
+      assert(result != null);
+      assert(result.getDocument() != null);
+      assert(result.getId() != null);
+      assert(result.getMetadata() != null);
+      assert(result.getMetadata().get("id") != null);
+      assert(result.getMetadata().get("type") != null);
 
-            LOGGER.info("valid: " + activity.getVerb() );
-        }
+      LOGGER.info("valid: " + activity.getVerb() );
+    }
 
-        for( ActivityObject activityObject : objects) {
-            LOGGER.info("Object: " + MAPPER.writeValueAsString(activityObject));
+    for( ActivityObject activityObject : objects) {
+      LOGGER.info("Object: " + MAPPER.writeValueAsString(activityObject));
 
-            activityObject.setId(activityObject.getObjectType());
-            StreamsDatum datum = new StreamsDatum(activityObject);
+      activityObject.setId(activityObject.getObjectType());
+      StreamsDatum datum = new StreamsDatum(activityObject);
 
-            List<StreamsDatum> resultList = processor.process(datum);
-            assert(resultList != null);
-            assert(resultList.size() == 1);
+      List<StreamsDatum> resultList = processor.process(datum);
+      assert(resultList != null);
+      assert(resultList.size() == 1);
 
-            StreamsDatum result = resultList.get(0);
-            assert(result != null);
-            assert(result.getDocument() != null);
-            assert(result.getId() != null);
-            assert(result.getMetadata() != null);
-            assert(result.getMetadata().get("id") != null);
-            assert(result.getMetadata().get("type") != null);
+      StreamsDatum result = resultList.get(0);
+      assert(result != null);
+      assert(result.getDocument() != null);
+      assert(result.getId() != null);
+      assert(result.getMetadata() != null);
+      assert(result.getMetadata().get("id") != null);
+      assert(result.getMetadata().get("type") != null);
 
-            LOGGER.info("valid: " + activityObject.getObjectType() );
-        }
+      LOGGER.info("valid: " + activityObject.getObjectType() );
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-filebuffer/src/main/java/org/apache/streams/filebuffer/FileBufferPersistReader.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-filebuffer/src/main/java/org/apache/streams/filebuffer/FileBufferPersistReader.java b/streams-contrib/streams-persist-filebuffer/src/main/java/org/apache/streams/filebuffer/FileBufferPersistReader.java
index 504ea5e..b921ba5 100644
--- a/streams-contrib/streams-persist-filebuffer/src/main/java/org/apache/streams/filebuffer/FileBufferPersistReader.java
+++ b/streams-contrib/streams-persist-filebuffer/src/main/java/org/apache/streams/filebuffer/FileBufferPersistReader.java
@@ -18,15 +18,17 @@
 
 package org.apache.streams.filebuffer;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Queues;
-import com.squareup.tape.QueueFile;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistReader;
 import org.apache.streams.core.StreamsResultSet;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Queues;
+import com.squareup.tape.QueueFile;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -48,135 +50,135 @@ import java.util.concurrent.Executors;
  */
 public class FileBufferPersistReader implements StreamsPersistReader, Serializable {
 
-    public static final String STREAMS_ID = "FileBufferPersistReader";
+  public static final String STREAMS_ID = "FileBufferPersistReader";
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(FileBufferPersistReader.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(FileBufferPersistReader.class);
 
-    protected volatile Queue<StreamsDatum> persistQueue;
+  protected volatile Queue<StreamsDatum> persistQueue;
 
-    private ObjectMapper mapper;
+  private ObjectMapper mapper;
 
-    private FileBufferConfiguration config;
+  private FileBufferConfiguration config;
 
-    private QueueFile queueFile;
+  private QueueFile queueFile;
 
-    private boolean isStarted = false;
-    private boolean isStopped = false;
+  private boolean isStarted = false;
+  private boolean isStopped = false;
 
-    private ExecutorService executor = Executors.newSingleThreadExecutor();
+  private ExecutorService executor = Executors.newSingleThreadExecutor();
 
-    public FileBufferPersistReader() {
-        this(new ComponentConfigurator<>(FileBufferConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("filebuffer")));
-    }
+  public FileBufferPersistReader() {
+    this(new ComponentConfigurator<>(FileBufferConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("filebuffer")));
+  }
 
-    public FileBufferPersistReader(FileBufferConfiguration config) {
-        this.config = config;
-    }
+  public FileBufferPersistReader(FileBufferConfiguration config) {
+    this.config = config;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public StreamsResultSet readAll() {
-        return readCurrent();
-    }
+  @Override
+  public StreamsResultSet readAll() {
+    return readCurrent();
+  }
 
-    @Override
-    public void startStream() {
-        isStarted = true;
-    }
+  @Override
+  public void startStream() {
+    isStarted = true;
+  }
 
-    @Override
-    public StreamsResultSet readCurrent() {
-
-        while (!queueFile.isEmpty()) {
-            try {
-                byte[] bytes = queueFile.peek();
-                ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
-                BufferedReader buf = new BufferedReader(new InputStreamReader(bais));
-                String s = buf.readLine();
-                LOGGER.debug(s);
-                write(new StreamsDatum(s));
-                queueFile.remove();
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
-        }
-
-        StreamsResultSet current;
-        current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(persistQueue));
-        persistQueue.clear();
-
-        return current;
-    }
+  @Override
+  public StreamsResultSet readCurrent() {
 
-    private void write( StreamsDatum entry ) {
-        persistQueue.offer(entry);
+    while (!queueFile.isEmpty()) {
+      try {
+        byte[] bytes = queueFile.peek();
+        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
+        BufferedReader buf = new BufferedReader(new InputStreamReader(bais));
+        String line = buf.readLine();
+        LOGGER.debug(line);
+        write(new StreamsDatum(line));
+        queueFile.remove();
+      } catch (IOException ex) {
+        ex.printStackTrace();
+      }
     }
 
-    @Override
-    public StreamsResultSet readNew(BigInteger bigInteger) {
-        return null;
-    }
+    StreamsResultSet current;
+    current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(persistQueue));
+    persistQueue.clear();
 
-    @Override
-    public StreamsResultSet readRange(DateTime dateTime, DateTime dateTime2) {
-        return null;
-    }
+    return current;
+  }
 
-    @Override
-    public boolean isRunning() {
-        return isStarted && !isStopped;
-    }
+  private void write( StreamsDatum entry ) {
+    persistQueue.offer(entry);
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public StreamsResultSet readNew(BigInteger bigInteger) {
+    return null;
+  }
 
-        try {
-            Thread.sleep(1000);
-        } catch (InterruptedException ie) {
-            //Handle exception
-        }
+  @Override
+  public StreamsResultSet readRange(DateTime dateTime, DateTime dateTime2) {
+    return null;
+  }
 
-        mapper = new ObjectMapper();
+  @Override
+  public boolean isRunning() {
+    return isStarted && !isStopped;
+  }
 
-        File file = new File( config.getPath());
+  @Override
+  public void prepare(Object configurationObject) {
 
-        if( !file.exists() ) {
-            try {
-                file.createNewFile();
-            } catch (IOException e) {
-                LOGGER.error(e.getMessage());
-            }
-        }
+    try {
+      Thread.sleep(1000);
+    } catch (InterruptedException ie) {
+      //Handle exception
+    }
 
-        Preconditions.checkArgument(file.exists());
-        Preconditions.checkArgument(file.canRead());
+    mapper = new ObjectMapper();
 
-        try {
-            queueFile = new QueueFile(file);
-        } catch (IOException e) {
-            LOGGER.error(e.getMessage());
-        }
+    File file = new File( config.getPath());
 
-        Preconditions.checkNotNull(queueFile);
+    if ( !file.exists() ) {
+      try {
+        file.createNewFile();
+      } catch (IOException ex) {
+        LOGGER.error(ex.getMessage());
+      }
+    }
 
-        this.persistQueue = new ConcurrentLinkedQueue<>();
+    Preconditions.checkArgument(file.exists());
+    Preconditions.checkArgument(file.canRead());
 
+    try {
+      queueFile = new QueueFile(file);
+    } catch (IOException ex) {
+      LOGGER.error(ex.getMessage());
     }
 
-        @Override
-    public void cleanUp() {
-        try {
-            queueFile.close();
-        } catch (IOException e) {
-            e.printStackTrace();
-        } finally {
-            queueFile = null;
-            isStopped = true;
-        }
+    Preconditions.checkNotNull(queueFile);
+
+    this.persistQueue = new ConcurrentLinkedQueue<>();
+
+  }
+
+  @Override
+  public void cleanUp() {
+    try {
+      queueFile.close();
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } finally {
+      queueFile = null;
+      isStopped = true;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-filebuffer/src/main/java/org/apache/streams/filebuffer/FileBufferPersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-filebuffer/src/main/java/org/apache/streams/filebuffer/FileBufferPersistWriter.java b/streams-contrib/streams-persist-filebuffer/src/main/java/org/apache/streams/filebuffer/FileBufferPersistWriter.java
index 4dea85c..76dfafc 100644
--- a/streams-contrib/streams-persist-filebuffer/src/main/java/org/apache/streams/filebuffer/FileBufferPersistWriter.java
+++ b/streams-contrib/streams-persist-filebuffer/src/main/java/org/apache/streams/filebuffer/FileBufferPersistWriter.java
@@ -18,15 +18,17 @@
 
 package org.apache.streams.filebuffer;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
-import com.squareup.tape.QueueFile;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistWriter;
 import org.apache.streams.util.GuidUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import com.squareup.tape.QueueFile;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -41,79 +43,79 @@ import java.util.concurrent.ConcurrentLinkedQueue;
  */
 public class FileBufferPersistWriter implements StreamsPersistWriter, Serializable {
 
-    public final static String STREAMS_ID = "FileBufferPersistWriter";
+  public static final String STREAMS_ID = "FileBufferPersistWriter";
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(FileBufferPersistWriter.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(FileBufferPersistWriter.class);
 
-    protected volatile Queue<StreamsDatum> persistQueue;
+  protected volatile Queue<StreamsDatum> persistQueue;
 
-    private ObjectMapper mapper;
+  private ObjectMapper mapper;
 
-    private FileBufferConfiguration config;
+  private FileBufferConfiguration config;
 
-    private QueueFile queueFile;
+  private QueueFile queueFile;
 
-    public FileBufferPersistWriter() {
-       this(new ComponentConfigurator<>(FileBufferConfiguration.class)
-         .detectConfiguration(StreamsConfigurator.getConfig().getConfig("filebuffer")));
-    }
+  public FileBufferPersistWriter() {
+    this(new ComponentConfigurator<>(FileBufferConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("filebuffer")));
+  }
 
-    public FileBufferPersistWriter(FileBufferConfiguration config) {
-        this.config = config;
-    }
+  public FileBufferPersistWriter(FileBufferConfiguration config) {
+    this.config = config;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public void write(StreamsDatum entry) {
+  @Override
+  public void write(StreamsDatum entry) {
 
-        String key = entry.getId() != null ? entry.getId() : GuidUtils.generateGuid("filewriter");
+    String key = entry.getId() != null ? entry.getId() : GuidUtils.generateGuid("filewriter");
 
-        Preconditions.checkArgument(!Strings.isNullOrEmpty(key));
-        Preconditions.checkArgument(entry.getDocument() instanceof String);
-        Preconditions.checkArgument(!Strings.isNullOrEmpty((String) entry.getDocument()));
+    Preconditions.checkArgument(!Strings.isNullOrEmpty(key));
+    Preconditions.checkArgument(entry.getDocument() instanceof String);
+    Preconditions.checkArgument(!Strings.isNullOrEmpty((String) entry.getDocument()));
 
-        byte[] item = ((String)entry.getDocument()).getBytes();
-        try {
-            queueFile.add(item);
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
+    byte[] item = ((String)entry.getDocument()).getBytes();
+    try {
+      queueFile.add(item);
+    } catch (IOException ex) {
+      ex.printStackTrace();
     }
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public void prepare(Object configurationObject) {
 
-        mapper = new ObjectMapper();
+    mapper = new ObjectMapper();
 
-        File file = new File( config.getPath());
+    File file = new File( config.getPath());
 
-        try {
-            queueFile = new QueueFile(file);
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
+    try {
+      queueFile = new QueueFile(file);
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    }
 
-        Preconditions.checkArgument(file.exists());
-        Preconditions.checkArgument(file.canWrite());
+    Preconditions.checkArgument(file.exists());
+    Preconditions.checkArgument(file.canWrite());
 
-        Preconditions.checkNotNull(queueFile);
+    Preconditions.checkNotNull(queueFile);
 
-        this.persistQueue  = new ConcurrentLinkedQueue<>();
+    this.persistQueue  = new ConcurrentLinkedQueue<>();
 
-    }
+  }
 
-    @Override
-    public void cleanUp() {
-        try {
-            queueFile.close();
-        } catch (IOException e) {
-            e.printStackTrace();
-        } finally {
-            queueFile = null;
-        }
+  @Override
+  public void cleanUp() {
+    try {
+      queueFile.close();
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    } finally {
+      queueFile = null;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/GraphHttpPersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/GraphHttpPersistWriter.java b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/GraphHttpPersistWriter.java
index 3c97fd7..847328a 100644
--- a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/GraphHttpPersistWriter.java
+++ b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/GraphHttpPersistWriter.java
@@ -18,16 +18,6 @@
 
 package org.apache.streams.graph;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ArrayNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
-import org.apache.http.HttpEntity;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.util.EntityUtils;
 import org.apache.streams.components.http.HttpPersistWriterConfiguration;
 import org.apache.streams.components.http.persist.SimpleHTTPPostPersistWriter;
 import org.apache.streams.config.ComponentConfigurator;
@@ -39,6 +29,18 @@ import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Provider;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
+
+import org.apache.http.HttpEntity;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.util.EntityUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -49,190 +51,203 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 /**
  * Adds activityobjects as vertices and activities as edges to a graph database with
- * an http rest endpoint (such as neo4j)
+ * an http rest endpoint (such as neo4j).
  */
 public class GraphHttpPersistWriter extends SimpleHTTPPostPersistWriter {
 
-    public static final String STREAMS_ID = GraphHttpPersistWriter.class.getCanonicalName();
+  public static final String STREAMS_ID = GraphHttpPersistWriter.class.getCanonicalName();
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(GraphHttpPersistWriter.class);
-    private final static long MAX_WRITE_LATENCY = 1000;
+  private static final Logger LOGGER = LoggerFactory.getLogger(GraphHttpPersistWriter.class);
+  private static final long MAX_WRITE_LATENCY = 1000;
 
-    protected GraphHttpConfiguration configuration;
+  protected GraphHttpConfiguration configuration;
 
-    protected QueryGraphHelper queryGraphHelper;
-    protected HttpGraphHelper httpGraphHelper;
+  protected QueryGraphHelper queryGraphHelper;
+  protected HttpGraphHelper httpGraphHelper;
 
-    private static ObjectMapper mapper;
+  private static ObjectMapper mapper;
 
-    protected final ReadWriteLock lock = new ReentrantReadWriteLock();
+  protected final ReadWriteLock lock = new ReentrantReadWriteLock();
 
-    public GraphHttpPersistWriter() {
-        this(new ComponentConfigurator<GraphHttpConfiguration>(GraphHttpConfiguration.class).detectConfiguration(StreamsConfigurator.config.getConfig("graph")));
-    }
+  /**
+   * GraphHttpPersistWriter constructor - resolve GraphHttpConfiguration from JVM 'graph'.
+   */
+  public GraphHttpPersistWriter() {
+    this(new ComponentConfigurator<GraphHttpConfiguration>(GraphHttpConfiguration.class).detectConfiguration(StreamsConfigurator.config.getConfig("graph")));
+  }
 
-    public GraphHttpPersistWriter(GraphHttpConfiguration configuration) {
-        super(StreamsJacksonMapper.getInstance().convertValue(configuration, HttpPersistWriterConfiguration.class));
-        if( configuration.getType().equals(GraphHttpConfiguration.Type.NEO_4_J)) {
-            super.configuration.setResourcePath("/db/" + configuration.getGraph() + "/transaction/commit/");
+  /**
+   * GraphHttpPersistWriter constructor - use supplied GraphHttpConfiguration.
+   * @param configuration GraphHttpConfiguration
+   */
+  public GraphHttpPersistWriter(GraphHttpConfiguration configuration) {
+    super(StreamsJacksonMapper.getInstance().convertValue(configuration, HttpPersistWriterConfiguration.class));
+    if ( configuration.getType().equals(GraphHttpConfiguration.Type.NEO_4_J)) {
+      super.configuration.setResourcePath("/db/" + configuration.getGraph() + "/transaction/commit/");
+    } else if ( configuration.getType().equals(GraphHttpConfiguration.Type.REXSTER)) {
+      super.configuration.setResourcePath("/graphs/" + configuration.getGraph());
+    }
+    this.configuration = configuration;
+  }
+
+  @Override
+  protected ObjectNode preparePayload(StreamsDatum entry) throws Exception {
+
+    Activity activity = null;
+    ActivityObject activityObject = null;
+    Object document = entry.getDocument();
+
+    if (document instanceof Activity) {
+      activity = (Activity) document;
+      activityObject = activity.getObject();
+    } else if (document instanceof ActivityObject) {
+      activityObject = (ActivityObject) document;
+    } else {
+      ObjectNode objectNode;
+      if (document instanceof ObjectNode) {
+        objectNode = (ObjectNode) document;
+      } else if ( document instanceof String) {
+        try {
+          objectNode = mapper.readValue((String) document, ObjectNode.class);
+        } catch (IOException ex) {
+          LOGGER.error("Can't handle input: ", entry);
+          throw ex;
         }
-        else if( configuration.getType().equals(GraphHttpConfiguration.Type.REXSTER)) {
-            super.configuration.setResourcePath("/graphs/" + configuration.getGraph());
+      } else {
+        LOGGER.error("Can't handle input: ", entry);
+        throw new Exception("Can't create payload from datum.");
+      }
+
+      if ( objectNode.get("verb") != null ) {
+        try {
+          activity = mapper.convertValue(objectNode, Activity.class);
+          activityObject = activity.getObject();
+        } catch (Exception ex) {
+          activityObject = mapper.convertValue(objectNode, ActivityObject.class);
         }
-        this.configuration = configuration;
+      } else {
+        activityObject = mapper.convertValue(objectNode, ActivityObject.class);
+      }
     }
 
-    @Override
-    protected ObjectNode preparePayload(StreamsDatum entry) throws Exception {
-
-        Activity activity = null;
-        ActivityObject activityObject = null;
-        Object document = entry.getDocument();
-
-        if (document instanceof Activity) {
-            activity = (Activity) document;
-            activityObject = activity.getObject();
-        } else if (document instanceof ActivityObject) {
-            activityObject = (ActivityObject) document;
-        } else {
-            ObjectNode objectNode;
-            if (document instanceof ObjectNode) {
-                objectNode = (ObjectNode) document;
-            } else if( document instanceof String) {
-                try {
-                    objectNode = mapper.readValue((String) document, ObjectNode.class);
-                } catch (IOException e) {
-                    LOGGER.error("Can't handle input: ", entry);
-                    throw e;
-                }
-            } else {
-                LOGGER.error("Can't handle input: ", entry);
-                throw new Exception("Can't create payload from datum.");
-            }
-
-            if( objectNode.get("verb") != null ) {
-                try {
-                    activity = mapper.convertValue(objectNode, Activity.class);
-                    activityObject = activity.getObject();
-                } catch (Exception e) {
-                    activityObject = mapper.convertValue(objectNode, ActivityObject.class);
-                }
-            } else {
-                activityObject = mapper.convertValue(objectNode, ActivityObject.class);
-            }
-        }
+    Preconditions.checkArgument(activity != null || activityObject != null);
+
+    ObjectNode request = mapper.createObjectNode();
+    ArrayNode statements = mapper.createArrayNode();
+
+    // always add vertices first
 
-        Preconditions.checkArgument(activity != null || activityObject != null);
+    List<String> labels = Lists.newArrayList("streams");
 
-        ObjectNode request = mapper.createObjectNode();
-        ArrayNode statements = mapper.createArrayNode();
+    if ( activityObject != null ) {
+      if ( activityObject.getObjectType() != null ) {
+        labels.add(activityObject.getObjectType());
+      }
+      statements.add(httpGraphHelper.createHttpRequest(queryGraphHelper.mergeVertexRequest(activityObject)));
+    }
 
-        // always add vertices first
+    if ( activity != null ) {
 
-        List<String> labels = Lists.newArrayList("streams");
+      ActivityObject actor = activity.getActor();
+      Provider provider = activity.getProvider();
 
-        if( activityObject != null ) {
-            if (activityObject.getObjectType() != null)
-                labels.add(activityObject.getObjectType());
-            statements.add(httpGraphHelper.createHttpRequest(queryGraphHelper.mergeVertexRequest(activityObject)));
+      if ( provider != null
+          && !Strings.isNullOrEmpty(provider.getId()) ) {
+        labels.add(provider.getId());
+      }
+      if (actor != null
+          && !Strings.isNullOrEmpty(actor.getId())) {
+        if (actor.getObjectType() != null) {
+          labels.add(actor.getObjectType());
         }
+        statements.add(httpGraphHelper.createHttpRequest(queryGraphHelper.mergeVertexRequest(actor)));
+      }
 
-        if( activity != null ) {
-
-            ActivityObject actor = activity.getActor();
-            Provider provider = activity.getProvider();
-
-            if( provider != null &&
-                    !Strings.isNullOrEmpty(provider.getId()) ) {
-                labels.add(provider.getId());
-            }
-            if (actor != null &&
-                    !Strings.isNullOrEmpty(actor.getId())) {
-                if (actor.getObjectType() != null)
-                    labels.add(actor.getObjectType());
-                statements.add(httpGraphHelper.createHttpRequest(queryGraphHelper.mergeVertexRequest(actor)));
-            }
-
-            if (activityObject != null &&
-                    !Strings.isNullOrEmpty(activityObject.getId())) {
-                if (activityObject.getObjectType() != null)
-                    labels.add(activityObject.getObjectType());
-                statements.add(httpGraphHelper.createHttpRequest(queryGraphHelper.mergeVertexRequest(activityObject)));
-            }
-
-            // then add edge
-
-            if (!Strings.isNullOrEmpty(activity.getVerb())) {
-                statements.add(httpGraphHelper.createHttpRequest(queryGraphHelper.createEdgeRequest(activity)));
-            }
+      if (activityObject != null
+          && !Strings.isNullOrEmpty(activityObject.getId())) {
+        if (activityObject.getObjectType() != null) {
+          labels.add(activityObject.getObjectType());
         }
+        statements.add(httpGraphHelper.createHttpRequest(queryGraphHelper.mergeVertexRequest(activityObject)));
+      }
 
-        request.put("statements", statements);
-        return request;
+      // then add edge
 
+      if (!Strings.isNullOrEmpty(activity.getVerb())) {
+        statements.add(httpGraphHelper.createHttpRequest(queryGraphHelper.createEdgeRequest(activity)));
+      }
     }
 
-    @Override
-    protected ObjectNode executePost(HttpPost httpPost) {
-
-        Preconditions.checkNotNull(httpPost);
-
-        ObjectNode result = null;
-
-        CloseableHttpResponse response = null;
-
-        String entityString = null;
-        try {
-            response = httpclient.execute(httpPost);
-            HttpEntity entity = response.getEntity();
-            if (response.getStatusLine().getStatusCode() == 200 || response.getStatusLine().getStatusCode() == 201 && entity != null) {
-                entityString = EntityUtils.toString(entity);
-                result = mapper.readValue(entityString, ObjectNode.class);
-            }
-            LOGGER.debug("Writer response:\n{}\n{}\n{}", httpPost.toString(), response.getStatusLine().getStatusCode(), entityString);
-            if( result == null ||
-                    (
-                        result.get("errors") != null &&
-                        result.get("errors").isArray() &&
-                        result.get("errors").iterator().hasNext()
-                    )
-                ) {
-                LOGGER.error("Write Error: " + result.get("errors"));
-            } else {
-                LOGGER.debug("Write Success");
-            }
-        } catch (IOException e) {
-            LOGGER.error("IO error:\n{}\n{}\n{}", httpPost.toString(), response, e.getMessage());
-        } catch (Exception e) {
-            LOGGER.error("Write Exception:\n{}\n{}\n{}", httpPost.toString(), response, e.getMessage());
-        } finally {
-            try {
-                if( response != null) response.close();
-            } catch (IOException e) {}
+    request.put("statements", statements);
+    return request;
+
+  }
+
+  @Override
+  protected ObjectNode executePost(HttpPost httpPost) {
+
+    Preconditions.checkNotNull(httpPost);
+
+    ObjectNode result = null;
+
+    CloseableHttpResponse response = null;
+
+    String entityString = null;
+    try {
+      response = httpclient.execute(httpPost);
+      HttpEntity entity = response.getEntity();
+      if (response.getStatusLine().getStatusCode() == 200 || response.getStatusLine().getStatusCode() == 201 && entity != null) {
+        entityString = EntityUtils.toString(entity);
+        result = mapper.readValue(entityString, ObjectNode.class);
+      }
+      LOGGER.debug("Writer response:\n{}\n{}\n{}", httpPost.toString(), response.getStatusLine().getStatusCode(), entityString);
+      if ( result == null
+           || (
+              result.get("errors") != null
+                  && result.get("errors").isArray()
+                  && result.get("errors").iterator().hasNext()
+              )
+          ) {
+        LOGGER.error("Write Error: " + result.get("errors"));
+      } else {
+        LOGGER.debug("Write Success");
+      }
+    } catch (IOException ex) {
+      LOGGER.error("IO error:\n{}\n{}\n{}", httpPost.toString(), response, ex.getMessage());
+    } catch (Exception ex) {
+      LOGGER.error("Write Exception:\n{}\n{}\n{}", httpPost.toString(), response, ex.getMessage());
+    } finally {
+      try {
+        if ( response != null) {
+          response.close();
         }
-        return result;
+      } catch (IOException ignored) {
+        LOGGER.trace("ignored IOException", ignored);
+      }
     }
+    return result;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-
-        super.prepare(configuration);
-        mapper = StreamsJacksonMapper.getInstance();
+  @Override
+  public void prepare(Object configurationObject) {
 
-        if( configuration.getType().equals(GraphHttpConfiguration.Type.NEO_4_J)) {
-            queryGraphHelper = new CypherQueryGraphHelper();
-            httpGraphHelper = new Neo4jHttpGraphHelper();
-        }
+    super.prepare(configuration);
+    mapper = StreamsJacksonMapper.getInstance();
 
-        Preconditions.checkNotNull(queryGraphHelper);
-        Preconditions.checkNotNull(httpGraphHelper);
+    if ( configuration.getType().equals(GraphHttpConfiguration.Type.NEO_4_J)) {
+      queryGraphHelper = new CypherQueryGraphHelper();
+      httpGraphHelper = new Neo4jHttpGraphHelper();
     }
 
-    @Override
-    public void cleanUp() {
+    Preconditions.checkNotNull(queryGraphHelper);
+    Preconditions.checkNotNull(httpGraphHelper);
+  }
 
-        LOGGER.info("exiting");
+  @Override
+  public void cleanUp() {
 
-    }
+    LOGGER.info("exiting");
+
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/GraphVertexReader.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/GraphVertexReader.java b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/GraphVertexReader.java
index 731159f..7c6e341 100644
--- a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/GraphVertexReader.java
+++ b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/GraphVertexReader.java
@@ -18,10 +18,6 @@
 
 package org.apache.streams.graph;
 
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Lists;
 import org.apache.streams.components.http.HttpProviderConfiguration;
 import org.apache.streams.components.http.provider.SimpleHttpProvider;
 import org.apache.streams.config.ComponentConfigurator;
@@ -33,6 +29,12 @@ import org.apache.streams.graph.neo4j.CypherQueryResponse;
 import org.apache.streams.graph.neo4j.ItemData;
 import org.apache.streams.graph.neo4j.ItemMetadata;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.collect.Lists;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -40,76 +42,86 @@ import java.util.List;
 
 /**
  * Reads a stream of activityobjects from vertices in a graph database with
- * an http rest endpoint (such as neo4j)
+ * an http rest endpoint (such as neo4j).
  */
 public class GraphVertexReader extends SimpleHttpProvider implements StreamsPersistReader {
 
-    public static final String STREAMS_ID = GraphVertexReader.class.getCanonicalName();
+  public static final String STREAMS_ID = GraphVertexReader.class.getCanonicalName();
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(GraphVertexReader.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(GraphVertexReader.class);
 
-    protected GraphReaderConfiguration configuration;
+  protected GraphReaderConfiguration configuration;
 
-    private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    public GraphVertexReader() {
-        this(new ComponentConfigurator<GraphReaderConfiguration>(GraphReaderConfiguration.class).detectConfiguration(StreamsConfigurator.config.getConfig("graph")));
-    }
+  /**
+   * GraphVertexReader constructor - resolve GraphReaderConfiguration from JVM 'graph'.
+   */
+  public GraphVertexReader() {
+    this(new ComponentConfigurator<GraphReaderConfiguration>(GraphReaderConfiguration.class).detectConfiguration(StreamsConfigurator.config.getConfig("graph")));
+  }
 
-    public GraphVertexReader(GraphReaderConfiguration configuration) {
-        super(mapper.convertValue(configuration, HttpProviderConfiguration.class));
-        if( configuration.getType().equals(GraphHttpConfiguration.Type.NEO_4_J))
-            super.configuration.setResourcePath("/db/" + configuration.getGraph() + "/transaction/commit");
-        else if( configuration.getType().equals(GraphHttpConfiguration.Type.REXSTER))
-            super.configuration.setResourcePath("/graphs/" + configuration.getGraph());
-        this.configuration = configuration;
+  /**
+   * GraphVertexReader constructor - use supplied GraphReaderConfiguration.
+   * @param configuration GraphReaderConfiguration
+   */
+  public GraphVertexReader(GraphReaderConfiguration configuration) {
+    super(mapper.convertValue(configuration, HttpProviderConfiguration.class));
+    if ( configuration.getType().equals(GraphHttpConfiguration.Type.NEO_4_J)) {
+      super.configuration.setResourcePath("/db/" + configuration.getGraph() + "/transaction/commit");
+    } else if ( configuration.getType().equals(GraphHttpConfiguration.Type.REXSTER)) {
+      super.configuration.setResourcePath("/graphs/" + configuration.getGraph());
     }
+    this.configuration = configuration;
+  }
 
-    /*
-     * Neo API query returns something like this:
-     * { "columns": [ "v" ], "data": [ [ { "data": { props }, etc... } ], [ { "data": { props }, etc... } ] ] }
-     *
-     */
-    public List<ObjectNode> parse(JsonNode jsonNode) {
-        List<ObjectNode> results = Lists.newArrayList();
+  /**
+   * Neo API query returns something like this:
+   * { "columns": [ "v" ], "data": [ [ { "data": { props }, etc... } ], [ { "data": { props }, etc... } ] ] }
+   *
+   * @param jsonNode jsonNode
+   * @return result
+   */
+  public List<ObjectNode> parse(JsonNode jsonNode) {
+    List<ObjectNode> results = Lists.newArrayList();
 
-        ObjectNode root = (ObjectNode) jsonNode;
+    ObjectNode root = (ObjectNode) jsonNode;
 
-        CypherQueryResponse cypherQueryResponse = mapper.convertValue(root, CypherQueryResponse.class);
+    CypherQueryResponse cypherQueryResponse = mapper.convertValue(root, CypherQueryResponse.class);
 
-        for( List<List<ItemMetadata>> dataWrapper : cypherQueryResponse.getData()) {
+    for ( List<List<ItemMetadata>> dataWrapper : cypherQueryResponse.getData()) {
 
-            for (List<ItemMetadata> itemMetadatas : dataWrapper) {
+      for (List<ItemMetadata> itemMetadatas : dataWrapper) {
 
-                for (ItemMetadata itemMetadata : itemMetadatas) {
+        for (ItemMetadata itemMetadata : itemMetadatas) {
 
-                    ItemData itemData = itemMetadata.getData();
+          ItemData itemData = itemMetadata.getData();
 
-                    LOGGER.debug("itemData: " + itemData);
+          LOGGER.debug("itemData: " + itemData);
 
-                    results.add(PropertyUtil.unflattenMap(itemData.getAdditionalProperties(), '.'));
-                }
+          results.add(PropertyUtil.unflattenMap(itemData.getAdditionalProperties(), '.'));
+        }
 
-            }
+      }
 
-        }
-        return results;
     }
+    return results;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public void prepare(Object configurationObject) {
 
-        super.prepare(configurationObject);
+    super.prepare(configurationObject);
 
-    }
+  }
 
-    @Override
-    public StreamsResultSet readAll() {
-        return readCurrent();
-    }
+  @Override
+  public StreamsResultSet readAll() {
+    return readCurrent();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/HttpGraphHelper.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/HttpGraphHelper.java b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/HttpGraphHelper.java
index 0833ba0..17b8840 100644
--- a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/HttpGraphHelper.java
+++ b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/HttpGraphHelper.java
@@ -19,8 +19,7 @@
 package org.apache.streams.graph;
 
 import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.pojo.json.ActivityObject;
+
 import org.javatuples.Pair;
 
 import java.util.Map;
@@ -31,6 +30,6 @@ import java.util.Map;
  */
 public interface HttpGraphHelper {
 
-    public ObjectNode createHttpRequest(Pair<String, Map<String, Object>> queryPlusParameters);
+  public ObjectNode createHttpRequest(Pair<String, Map<String, Object>> queryPlusParameters);
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/QueryGraphHelper.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/QueryGraphHelper.java b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/QueryGraphHelper.java
index eeacdae..1699aee 100644
--- a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/QueryGraphHelper.java
+++ b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/QueryGraphHelper.java
@@ -18,28 +18,27 @@
 
 package org.apache.streams.graph;
 
-import com.fasterxml.jackson.databind.node.ObjectNode;
-
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
+
 import org.javatuples.Pair;
 
 import java.util.Map;
 
 /**
  * Interface for methods allowing persistance to a graph database which uses a combination
- * DSL
+ * DSL.
  */
 public interface QueryGraphHelper {
 
-    public Pair<String, Map<String, Object>> getVertexRequest(String streamsId);
+  public Pair<String, Map<String, Object>> getVertexRequest(String streamsId);
 
-    public Pair<String, Map<String, Object>> getVertexRequest(Long vertexId);
+  public Pair<String, Map<String, Object>> getVertexRequest(Long vertexId);
 
-    public Pair<String, Map<String, Object>> createVertexRequest(ActivityObject activityObject);
+  public Pair<String, Map<String, Object>> createVertexRequest(ActivityObject activityObject);
 
-    public Pair<String, Map<String, Object>> mergeVertexRequest(ActivityObject activityObject);
+  public Pair<String, Map<String, Object>> mergeVertexRequest(ActivityObject activityObject);
 
-    public Pair<String, Map<String, Object>> createEdgeRequest(Activity activity);
+  public Pair<String, Map<String, Object>> createEdgeRequest(Activity activity);
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/BinaryGraphHelper.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/BinaryGraphHelper.java b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/BinaryGraphHelper.java
deleted file mode 100644
index 3dc8ffc..0000000
--- a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/BinaryGraphHelper.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.graph.neo4j;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import org.apache.streams.data.util.PropertyUtil;
-import org.apache.streams.graph.QueryGraphHelper;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.pojo.json.ActivityObject;
-import org.javatuples.Pair;
-import org.javatuples.Quartet;
-import org.stringtemplate.v4.ST;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * Supporting class for interacting with neo4j via rest API
- */
-public class BinaryGraphHelper {
-
-    private final static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    public Pair<String, Map<String, Object>> createVertexRequest(ActivityObject activityObject) {
-
-        Preconditions.checkNotNull(activityObject.getObjectType());
-
-        ObjectNode object = mapper.convertValue(activityObject, ObjectNode.class);
-        Map<String, Object> props = PropertyUtil.flattenToMap(object, '.');
-
-        Pair<String, Map<String, Object>> queryPlusParameters = new Pair(props.get("id"), props);
-
-        return queryPlusParameters;
-    }
-
-    public Quartet<String, String, String, Map<String, Object>> createEdgeRequest(Activity activity) {
-
-        ObjectNode object = mapper.convertValue(activity, ObjectNode.class);
-        Map<String, Object> props = PropertyUtil.flattenToMap(object, '.');
-
-        Quartet createEdgeRequest = new Quartet(
-                activity.getActor().getId(),
-                activity.getObject().getId(),
-                activity.getId(),
-                props);
-
-        return createEdgeRequest;
-    }
-
-    public static String getPropertyValueSetter(Map<String, Object> map, String symbol) {
-        StringBuilder builder = new StringBuilder();
-        for( Map.Entry<String, Object> entry : map.entrySet()) {
-            if( entry.getValue() instanceof String ) {
-                String propVal = (String)(entry.getValue());
-                builder.append("," + symbol + ".`" + entry.getKey() + "` = '" + propVal + "'");
-            }
-        }
-        return builder.toString();
-    }
-
-    public static String getPropertyParamSetter(Map<String, Object> map, String symbol) {
-        StringBuilder builder = new StringBuilder();
-        for( Map.Entry<String, Object> entry : map.entrySet()) {
-            if( entry.getValue() instanceof String ) {
-                String propVal = (String)(entry.getValue());
-                builder.append("," + symbol + ".`" + entry.getKey() + "` = '" + propVal + "'");
-            }
-        }
-        return builder.toString();
-    }
-
-    public static String getPropertyCreater(Map<String, Object> map) {
-        StringBuilder builder = new StringBuilder();
-        builder.append("{");
-        List<String> parts = Lists.newArrayList();
-        for( Map.Entry<String, Object> entry : map.entrySet()) {
-            if( entry.getValue() instanceof String ) {
-                String propVal = (String) (entry.getValue());
-                parts.add("`"+entry.getKey() + "`:'" + propVal + "'");
-            }
-        }
-        builder.append(Joiner.on(",").join(parts));
-        builder.append("}");
-        return builder.toString();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/CypherGraphHelper.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/CypherGraphHelper.java b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/CypherGraphHelper.java
deleted file mode 100644
index 8028350..0000000
--- a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/CypherGraphHelper.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.graph.neo4j;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import org.apache.streams.data.util.PropertyUtil;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.pojo.json.ActivityObject;
-import org.stringtemplate.v4.ST;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * Supporting class for interacting with neo4j via rest API
- */
-public class CypherGraphHelper implements org.apache.streams.graph.GraphHelper {
-
-    private final static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    public final static String statementKey = "statement";
-    public final static String paramsKey = "parameters";
-    public final static String propsKey = "props";
-
-    public final static String getVertexLongIdStatementTemplate = "MATCH (v) WHERE ID(v) = <id> RETURN v";
-    public final static String getVertexStringIdStatementTemplate = "MATCH (v {id: '<id>'} ) RETURN v";
-
-    public final static String createVertexStatementTemplate = "MATCH (x {id: '<id>'}) "+
-                                                                "CREATE UNIQUE (v:<type> { props }) "+
-                                                                "ON CREATE SET v <labels> "+
-                                                                "RETURN v";
-
-    public final static String mergeVertexStatementTemplate = "MERGE (v:<type> {id: '<id>'}) "+
-                                                               "ON CREATE SET v <labels>, v = { props }, v.`@timestamp` = timestamp() "+
-                                                               "ON MATCH SET v <labels>, v = { props }, v.`@timestamp` = timestamp() "+
-                                                               "RETURN v";
-
-    public final static String createEdgeStatementTemplate = "MATCH (s:<s_type> {id: '<s_id>'}),(d:<d_type> {id: '<d_id>'}) "+
-                                                            "CREATE UNIQUE (s)-[r:<r_type> <r_props>]->(d) "+
-                                                            "RETURN r";
-
-    public ObjectNode getVertexRequest(String streamsId) {
-
-        ObjectNode request = mapper.createObjectNode();
-
-        ST getVertex = new ST(getVertexStringIdStatementTemplate);
-        getVertex.add("id", streamsId);
-        request.put(statementKey, getVertex.render());
-
-        return request;
-    }
-
-    @Override
-    public ObjectNode getVertexRequest(Long vertexId) {
-
-        ObjectNode request = mapper.createObjectNode();
-
-        ST getVertex = new ST(getVertexLongIdStatementTemplate);
-        getVertex.add("id", vertexId);
-        request.put(statementKey, getVertex.render());
-
-        return request;
-    }
-
-    public ObjectNode createVertexRequest(ActivityObject activityObject) {
-
-        Preconditions.checkNotNull(activityObject.getObjectType());
-
-        ObjectNode request = mapper.createObjectNode();
-
-        List<String> labels = Lists.newArrayList();
-        if( activityObject.getAdditionalProperties().containsKey("labels") ) {
-            List<String> extraLabels = (List<String>)activityObject.getAdditionalProperties().get("labels");
-            for( String extraLabel : extraLabels )
-                labels.add(":"+extraLabel);
-        }
-
-        ST createVertex = new ST(createVertexStatementTemplate);
-        createVertex.add("id", activityObject.getId());
-        createVertex.add("type", activityObject.getObjectType());
-        createVertex.add("labels", Joiner.on(' ').join(labels));
-        request.put(statementKey, createVertex.render());
-
-        ObjectNode params = mapper.createObjectNode();
-        ObjectNode object = mapper.convertValue(activityObject, ObjectNode.class);
-        ObjectNode props = PropertyUtil.flattenToObjectNode(object, '.');
-        params.put(propsKey, props);
-        request.put(paramsKey, params);
-
-        return request;
-    }
-
-    public ObjectNode mergeVertexRequest(ActivityObject activityObject) {
-
-        Preconditions.checkNotNull(activityObject.getObjectType());
-
-        ObjectNode request = mapper.createObjectNode();
-
-        List<String> labels = Lists.newArrayList();
-        if( activityObject.getAdditionalProperties().containsKey("labels") ) {
-            List<String> extraLabels = (List<String>)activityObject.getAdditionalProperties().get("labels");
-            for( String extraLabel : extraLabels )
-                labels.add(":"+extraLabel);
-        }
-
-        ST mergeVertex = new ST(mergeVertexStatementTemplate);
-        mergeVertex.add("id", activityObject.getId());
-        mergeVertex.add("type", activityObject.getObjectType());
-        mergeVertex.add("labels", Joiner.on(' ').join(labels));
-
-        ObjectNode params = mapper.createObjectNode();
-        ObjectNode object = mapper.convertValue(activityObject, ObjectNode.class);
-        ObjectNode props = PropertyUtil.flattenToObjectNode(object, '.');
-        params.put(propsKey, props);
-        request.put(paramsKey, params);
-
-        String statement = mergeVertex.render();
-
-        request.put(statementKey, statement);
-
-        return request;
-    }
-
-    public ObjectNode createEdgeRequest(Activity activity, ActivityObject source, ActivityObject destination) {
-
-        ObjectNode request = mapper.createObjectNode();
-
-        // set the activityObject's and extensions null, because their properties don't need to appear on the relationship
-        activity.setActor(null);
-        activity.setObject(null);
-        activity.setTarget(null);
-        activity.getAdditionalProperties().put("extensions", null);
-
-        ObjectNode object = mapper.convertValue(activity, ObjectNode.class);
-        Map<String, Object> props = PropertyUtil.flattenToMap(object, '.');
-
-        ST mergeEdge = new ST(createEdgeStatementTemplate);
-        mergeEdge.add("s_id", source.getId());
-        mergeEdge.add("s_type", source.getObjectType());
-        mergeEdge.add("d_id", destination.getId());
-        mergeEdge.add("d_type", destination.getObjectType());
-        mergeEdge.add("r_id", activity.getId());
-        mergeEdge.add("r_type", activity.getVerb());
-        mergeEdge.add("r_props", getPropertyCreater(props));
-
-        String statement = mergeEdge.render();
-        request.put(statementKey, statement);
-
-        return request;
-    }
-
-    public static String getPropertyValueSetter(Map<String, Object> map, String symbol) {
-        StringBuilder builder = new StringBuilder();
-        for( Map.Entry<String, Object> entry : map.entrySet()) {
-            if( entry.getValue() instanceof String ) {
-                String propVal = (String)(entry.getValue());
-                builder.append("," + symbol + ".`" + entry.getKey() + "` = '" + propVal + "'");
-            }
-        }
-        return builder.toString();
-    }
-
-    public static String getPropertyParamSetter(Map<String, Object> map, String symbol) {
-        StringBuilder builder = new StringBuilder();
-        for( Map.Entry<String, Object> entry : map.entrySet()) {
-            if( entry.getValue() instanceof String ) {
-                String propVal = (String)(entry.getValue());
-                builder.append("," + symbol + ".`" + entry.getKey() + "` = '" + propVal + "'");
-            }
-        }
-        return builder.toString();
-    }
-
-    public static String getPropertyCreater(Map<String, Object> map) {
-        StringBuilder builder = new StringBuilder();
-        builder.append("{");
-        List<String> parts = Lists.newArrayList();
-        for( Map.Entry<String, Object> entry : map.entrySet()) {
-            if( entry.getValue() instanceof String ) {
-                String propVal = (String) (entry.getValue());
-                parts.add("`"+entry.getKey() + "`:'" + propVal + "'");
-            }
-        }
-        builder.append(Joiner.on(",").join(parts));
-        builder.append("}");
-        return builder.toString();
-    }
-
-}



[37/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchQuery.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchQuery.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchQuery.java
index 3bb4b97..06a6dc8 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchQuery.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/ElasticsearchQuery.java
@@ -18,16 +18,17 @@
 
 package org.apache.streams.elasticsearch;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.search.Scroll;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.sort.SortBuilders;
 import org.slf4j.Logger;
@@ -38,190 +39,209 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
+/**
+ * Helper for building, querying, and paging an elasticsearch query.
+ */
 public class ElasticsearchQuery implements Iterable<SearchHit>, Iterator<SearchHit>, Serializable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchQuery.class);
-    private static final int SCROLL_POSITION_NOT_INITIALIZED = -3;
-
-    private ElasticsearchClientManager elasticsearchClientManager;
-    private ElasticsearchReaderConfiguration config;
-    private List<String> indexes = new ArrayList<>();
-    private List<String> types = new ArrayList<>();
-    private int limit = 1000 * 1000 * 1000; // we are going to set the default limit very high to 1bil
-    private int batchSize = 100;
-    private String scrollTimeout = "5m";
-    private org.elasticsearch.index.query.QueryBuilder queryBuilder;
-    private SearchRequestBuilder search;
-    private SearchResponse scrollResp;
-    private int scrollPositionInScroll = SCROLL_POSITION_NOT_INITIALIZED;
-    private SearchHit next = null;
-    private long totalHits = 0;
-    private long totalRead = 0;
-
-    private StreamsJacksonMapper mapper = StreamsJacksonMapper.getInstance();
-
-    public ElasticsearchQuery() {
-        this(new ComponentConfigurator<>(ElasticsearchReaderConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("elasticsearch")));
-    }
-
-    public ElasticsearchQuery(ElasticsearchReaderConfiguration config) {
-        this.config = config;
-        this.elasticsearchClientManager = new ElasticsearchClientManager(config);
-        this.indexes.addAll(config.getIndexes());
-        this.types.addAll(config.getTypes());
-        this.scrollTimeout = config.getScrollTimeout();
-    }
-
-    public long getHitCount() {
-        return this.search == null ? 0 : this.totalHits;
-    }
-
-    public long getReadCount() {
-        return this.totalRead;
-    }
-
-    public double getReadPercent() {
-        return (double) this.getReadCount() / (double) this.getHitCount();
-    }
-
-    public long getRemainingCount() {
-        return this.totalRead - this.totalHits;
-    }
-
-    public void setBatchSize(int batchSize) {
-        this.batchSize = batchSize;
-    }
-
-    public void setScrollTimeout(String scrollTimeout) {
-        this.scrollTimeout = scrollTimeout;
-    }
-
-    public void setQueryBuilder(QueryBuilder queryBuilder) {
-        this.queryBuilder = queryBuilder;
-    }
-
-    public void execute(Object o) {
-
-        // If we haven't already set up the search, then set up the search.
-        if (search == null) {
-
-            search = elasticsearchClientManager.getClient()
-                    .prepareSearch(indexes.toArray(new String[0]))
-                    .setSearchType(SearchType.SCAN)
-                    .setExplain(true)
-                    .addField("*")
-                    .setFetchSource(true)
-                    .setSize(batchSize)
-                    .setScroll(scrollTimeout)
-                    .addField("_timestamp");
-
-            LOGGER.debug("Search source: " + search.toString());
-
-            String searchJson;
-            if( config.getSearch() != null ) {
-                LOGGER.debug("Have config in Reader: " + config.getSearch().toString());
+  private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchQuery.class);
+  private static final int SCROLL_POSITION_NOT_INITIALIZED = -3;
+
+  private ElasticsearchClientManager elasticsearchClientManager;
+  private ElasticsearchReaderConfiguration config;
+  private List<String> indexes = new ArrayList<>();
+  private List<String> types = new ArrayList<>();
+  private int limit = 1000 * 1000 * 1000; // we are going to set the default limit very high to 1bil
+  private int batchSize = 100;
+  private String scrollTimeout = "5m";
+  private org.elasticsearch.index.query.QueryBuilder queryBuilder;
+  private SearchRequestBuilder search;
+  private SearchResponse scrollResp;
+  private int scrollPositionInScroll = SCROLL_POSITION_NOT_INITIALIZED;
+  private SearchHit next = null;
+  private long totalHits = 0;
+  private long totalRead = 0;
+
+  private StreamsJacksonMapper mapper = StreamsJacksonMapper.getInstance();
+
+  /**
+   * ElasticsearchQuery constructor - resolves ElasticsearchReaderConfiguration from JVM 'elasticsearch'.
+   */
+  public ElasticsearchQuery() {
+    this(new ComponentConfigurator<>(ElasticsearchReaderConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("elasticsearch")));
+  }
+
+  /**
+   * ElasticsearchQuery constructor - uses provided ElasticsearchReaderConfiguration.
+   */
+  public ElasticsearchQuery(ElasticsearchReaderConfiguration config) {
+    this.config = config;
+    this.elasticsearchClientManager = new ElasticsearchClientManager(config);
+    this.indexes.addAll(config.getIndexes());
+    this.types.addAll(config.getTypes());
+    this.scrollTimeout = config.getScrollTimeout();
+  }
+
+  public long getHitCount() {
+    return this.search == null ? 0 : this.totalHits;
+  }
+
+  public long getReadCount() {
+    return this.totalRead;
+  }
+
+  public double getReadPercent() {
+    return (double) this.getReadCount() / (double) this.getHitCount();
+  }
+
+  public long getRemainingCount() {
+    return this.totalRead - this.totalHits;
+  }
+
+  public void setBatchSize(int batchSize) {
+    this.batchSize = batchSize;
+  }
+
+  public void setScrollTimeout(String scrollTimeout) {
+    this.scrollTimeout = scrollTimeout;
+  }
+
+  public void setQueryBuilder(QueryBuilder queryBuilder) {
+    this.queryBuilder = queryBuilder;
+  }
+
+  /**
+   * execute ElasticsearchQuery.
+   * @param obj deprecated
+   */
+  public void execute(Object obj) {
+
+    // If we haven't already set up the search, then set up the search.
+    if (search == null) {
+
+      search = elasticsearchClientManager.getClient()
+          .prepareSearch(indexes.toArray(new String[0]))
+          .setSearchType(SearchType.SCAN)
+          .setExplain(true)
+          .addField("*")
+          .setFetchSource(true)
+          .setSize(batchSize)
+          .setScroll(scrollTimeout)
+          .addField("_timestamp");
+
+      LOGGER.debug("Search source: " + search.toString());
+
+      String searchJson;
+      if ( config.getSearch() != null ) {
+        LOGGER.debug("Have config in Reader: " + config.getSearch().toString());
 
-                try {
-                    searchJson = mapper.writeValueAsString(config.getSearch());
-                    LOGGER.debug("Extra source: " + searchJson);
-                    search = search.setExtraSource(searchJson);
-
-                } catch (JsonProcessingException e) {
-                    LOGGER.warn("Could not apply _search supplied by config", e.getMessage());
-                }
-
-
-            }
-
-            LOGGER.debug("Final Search: " + search.internalBuilder().toString());
-
-            if (this.queryBuilder != null)
-                search = search.setQuery(this.queryBuilder);
-
-            // If the types are null, then don't specify a type
-            if (this.types != null && this.types.size() > 0)
-                search = search.setTypes(types.toArray(new String[0]));
-
-            // TODO: Replace when all clusters are upgraded past 0.90.4 so we can implement a RANDOM scroll.
-            boolean random = false;
-            if (random)
-                search = search.addSort(SortBuilders.scriptSort(new Script("random()"), "number"));
-        }
-
-        // We don't have a scroll, we need to create a scroll
-        if (scrollResp == null) {
-            scrollResp = search.execute().actionGet();
-            LOGGER.trace(search.toString());
-        }
-    }
-
-    //Iterable methods
-    @Override
-    public Iterator<SearchHit> iterator() {
-        return this;
-    }
-
-    //Iterator methods
-    @Override
-    public SearchHit next() {
-        return this.next;
-    }
-
-    @Override
-    public boolean hasNext() {
-        calcNext();
-        return hasRecords();
-    }
-
-    public void calcNext() {
         try {
-            // We have exhausted our scroll create another scroll.
-            if (scrollPositionInScroll == SCROLL_POSITION_NOT_INITIALIZED || scrollPositionInScroll >= scrollResp.getHits().getHits().length) {
-                // reset the scroll position
-                scrollPositionInScroll = 0;
-
-                // get the next hits of the scroll
-                scrollResp = elasticsearchClientManager.getClient()
-                        .prepareSearchScroll(scrollResp.getScrollId())
-                        .setScroll(scrollTimeout)
-                        .execute()
-                        .actionGet();
-
-                this.totalHits = scrollResp.getHits().getTotalHits();
-            }
-
-            // If this scroll has 0 items then we set the scroll position to -1
-            // letting the iterator know that we are done.
-            if (scrollResp.getHits().getTotalHits() == 0 || scrollResp.getHits().getHits().length == 0)
-                scrollPositionInScroll = -1;
-            else {
-                // get the next record
-                next = scrollResp.getHits().getAt(scrollPositionInScroll);
-
-                // Increment our counters
-                scrollPositionInScroll += 1;
-                totalRead += 1;
-            }
-        } catch (Exception e) {
-            LOGGER.error("Unexpected scrolling error: {}", e.getMessage());
-            scrollPositionInScroll = -1;
-            next = null;
-        }
-    }
+          searchJson = mapper.writeValueAsString(config.getSearch());
+          LOGGER.debug("Extra source: " + searchJson);
+          search = search.setExtraSource(searchJson);
 
-    public void remove() {
-    }
-
-    public void cleanUp() {
-    }
+        } catch (JsonProcessingException ex) {
+          LOGGER.warn("Could not apply _search supplied by config", ex.getMessage());
+        }
 
-    protected boolean isCompleted() {
-        return totalRead >= this.limit && hasRecords();
-    }
 
-    protected boolean hasRecords() {
-        return scrollPositionInScroll != -1 && (!(this.totalRead > this.limit));
-    }
+      }
+
+      LOGGER.debug("Final Search: " + search.internalBuilder().toString());
+
+      if (this.queryBuilder != null) {
+        search = search.setQuery(this.queryBuilder);
+      }
+
+      // If the types are null, then don't specify a type
+      if (this.types != null && this.types.size() > 0) {
+        search = search.setTypes(types.toArray(new String[0]));
+      }
+
+      // TODO: Replace when all clusters are upgraded past 0.90.4 so we can implement a RANDOM scroll.
+      boolean random = false;
+      if (random) {
+        search = search.addSort(SortBuilders.scriptSort(new Script("random()"), "number"));
+      }
+    }
+
+    // We don't have a scroll, we need to create a scroll
+    if (scrollResp == null) {
+      scrollResp = search.execute().actionGet();
+      LOGGER.trace(search.toString());
+    }
+  }
+
+  //Iterable methods
+  @Override
+  public Iterator<SearchHit> iterator() {
+    return this;
+  }
+
+  //Iterator methods
+  @Override
+  public SearchHit next() {
+    return this.next;
+  }
+
+  @Override
+  public boolean hasNext() {
+    calcNext();
+    return hasRecords();
+  }
+
+  /**
+   * shift to next page of scroll.
+   */
+  public void calcNext() {
+    try {
+      // We have exhausted our scroll create another scroll.
+      if (scrollPositionInScroll == SCROLL_POSITION_NOT_INITIALIZED || scrollPositionInScroll >= scrollResp.getHits().getHits().length) {
+        // reset the scroll position
+        scrollPositionInScroll = 0;
+
+        // get the next hits of the scroll
+        scrollResp = elasticsearchClientManager.getClient()
+            .prepareSearchScroll(scrollResp.getScrollId())
+            .setScroll(scrollTimeout)
+            .execute()
+            .actionGet();
+
+        this.totalHits = scrollResp.getHits().getTotalHits();
+      }
+
+      // If this scroll has 0 items then we set the scroll position to -1
+      // letting the iterator know that we are done.
+      if (scrollResp.getHits().getTotalHits() == 0 || scrollResp.getHits().getHits().length == 0) {
+        scrollPositionInScroll = -1;
+      } else {
+        // get the next record
+        next = scrollResp.getHits().getAt(scrollPositionInScroll);
+
+        // Increment our counters
+        scrollPositionInScroll += 1;
+        totalRead += 1;
+      }
+    } catch (Exception ex) {
+      LOGGER.error("Unexpected scrolling error: {}", ex.getMessage());
+      scrollPositionInScroll = -1;
+      next = null;
+    }
+  }
+
+  public void remove() {
+  }
+
+  public void cleanUp() {
+  }
+
+  protected boolean isCompleted() {
+    return totalRead >= this.limit && hasRecords();
+  }
+
+  protected boolean hasRecords() {
+    return scrollPositionInScroll != -1 && (!(this.totalRead > this.limit));
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DatumFromMetadataAsDocumentProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DatumFromMetadataAsDocumentProcessor.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DatumFromMetadataAsDocumentProcessor.java
index 26012ef..6ce15d4 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DatumFromMetadataAsDocumentProcessor.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DatumFromMetadataAsDocumentProcessor.java
@@ -18,15 +18,6 @@
 
 package org.apache.streams.elasticsearch.processor;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
-import com.typesafe.config.Config;
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
@@ -35,90 +26,104 @@ import org.apache.streams.elasticsearch.ElasticsearchClientManager;
 import org.apache.streams.elasticsearch.ElasticsearchMetadataUtil;
 import org.apache.streams.elasticsearch.ElasticsearchReaderConfiguration;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
+import com.typesafe.config.Config;
+
 import org.elasticsearch.action.get.GetRequestBuilder;
 import org.elasticsearch.action.get.GetResponse;
 import org.joda.time.DateTime;
 
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
 /**
- * Uses index and type in metadata map stored in datum document to populate current document into datums
+ * Uses index and type in metadata map stored in datum document to populate current document into datums.
  */
 public class DatumFromMetadataAsDocumentProcessor implements StreamsProcessor, Serializable {
 
-    private final static String STREAMS_ID = "DatumFromMetadataProcessor";
+  private static final String STREAMS_ID = "DatumFromMetadataProcessor";
 
-    private ElasticsearchClientManager elasticsearchClientManager;
-    private ElasticsearchReaderConfiguration config;
+  private ElasticsearchClientManager elasticsearchClientManager;
+  private ElasticsearchReaderConfiguration config;
 
-    private ObjectMapper mapper;
+  private ObjectMapper mapper;
 
-    public DatumFromMetadataAsDocumentProcessor() {
-        this.config = new ComponentConfigurator<>(ElasticsearchReaderConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("elasticsearch"));
-    }
+  public DatumFromMetadataAsDocumentProcessor() {
+    this.config = new ComponentConfigurator<>(ElasticsearchReaderConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("elasticsearch"));
+  }
 
-    public DatumFromMetadataAsDocumentProcessor(Config config) {
-        this.config = new ComponentConfigurator<>(ElasticsearchReaderConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("elasticsearch"));
-    }
+  public DatumFromMetadataAsDocumentProcessor(Config config) {
+    this.config = new ComponentConfigurator<>(ElasticsearchReaderConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("elasticsearch"));
+  }
 
-    public DatumFromMetadataAsDocumentProcessor(ElasticsearchReaderConfiguration config) {
-        this.config = config;
-    }
+  public DatumFromMetadataAsDocumentProcessor(ElasticsearchReaderConfiguration config) {
+    this.config = config;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        List<StreamsDatum> result = new ArrayList<>();
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    List<StreamsDatum> result = new ArrayList<>();
 
-        ObjectNode metadataObjectNode;
-        try {
-            metadataObjectNode = mapper.readValue((String) entry.getDocument(), ObjectNode.class);
-        } catch (IOException e) {
-            return result;
-        }
+    ObjectNode metadataObjectNode;
+    try {
+      metadataObjectNode = mapper.readValue((String) entry.getDocument(), ObjectNode.class);
+    } catch (IOException ex) {
+      return result;
+    }
 
-        Map<String, Object> metadata = ElasticsearchMetadataUtil.asMap(metadataObjectNode);
+    Map<String, Object> metadata = ElasticsearchMetadataUtil.asMap(metadataObjectNode);
 
-        if(entry.getMetadata() == null)
-            return result;
+    if (entry.getMetadata() == null) {
+      return result;
+    }
 
-        String index = ElasticsearchMetadataUtil.getIndex(metadata, config);
-        String type = ElasticsearchMetadataUtil.getType(metadata, config);
-        String id = ElasticsearchMetadataUtil.getId(metadata);
+    String index = ElasticsearchMetadataUtil.getIndex(metadata, config);
+    String type = ElasticsearchMetadataUtil.getType(metadata, config);
+    String id = ElasticsearchMetadataUtil.getId(metadata);
 
-        GetRequestBuilder getRequestBuilder = elasticsearchClientManager.getClient().prepareGet(index, type, id);
-        getRequestBuilder.setFields("*", "_timestamp");
-        getRequestBuilder.setFetchSource(true);
-        GetResponse getResponse = getRequestBuilder.get();
+    GetRequestBuilder getRequestBuilder = elasticsearchClientManager.getClient().prepareGet(index, type, id);
+    getRequestBuilder.setFields("*", "_timestamp");
+    getRequestBuilder.setFetchSource(true);
+    GetResponse getResponse = getRequestBuilder.get();
 
-        if( getResponse == null || !getResponse.isExists() || getResponse.isSourceEmpty())
-            return result;
+    if ( getResponse == null || !getResponse.isExists() || getResponse.isSourceEmpty()) {
+      return result;
+    }
 
-        entry.setDocument(getResponse.getSource());
-        if( getResponse.getField("_timestamp") != null) {
-            DateTime timestamp = new DateTime(((Long) getResponse.getField("_timestamp").getValue()).longValue());
-            entry.setTimestamp(timestamp);
-        }
+    entry.setDocument(getResponse.getSource());
+    if ( getResponse.getField("_timestamp") != null) {
+      DateTime timestamp = new DateTime(((Long) getResponse.getField("_timestamp").getValue()).longValue());
+      entry.setTimestamp(timestamp);
+    }
 
-        result.add(entry);
+    result.add(entry);
 
-        return result;
-    }
+    return result;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        this.elasticsearchClientManager = new ElasticsearchClientManager(config);
-        mapper = StreamsJacksonMapper.getInstance();
-        mapper.registerModule(new JsonOrgModule());
-    }
+  @Override
+  public void prepare(Object configurationObject) {
+    this.elasticsearchClientManager = new ElasticsearchClientManager(config);
+    mapper = StreamsJacksonMapper.getInstance();
+    mapper.registerModule(new JsonOrgModule());
+  }
 
-    @Override
-    public void cleanUp() {
-        this.elasticsearchClientManager.getClient().close();
-    }
+  @Override
+  public void cleanUp() {
+    this.elasticsearchClientManager.getClient().close();
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DatumFromMetadataProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DatumFromMetadataProcessor.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DatumFromMetadataProcessor.java
index 7897e8d..bef190e 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DatumFromMetadataProcessor.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DatumFromMetadataProcessor.java
@@ -18,7 +18,6 @@
 
 package org.apache.streams.elasticsearch.processor;
 
-import com.typesafe.config.Config;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
@@ -26,6 +25,9 @@ import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.elasticsearch.ElasticsearchClientManager;
 import org.apache.streams.elasticsearch.ElasticsearchMetadataUtil;
 import org.apache.streams.elasticsearch.ElasticsearchReaderConfiguration;
+
+import com.typesafe.config.Config;
+
 import org.elasticsearch.action.get.GetRequestBuilder;
 import org.elasticsearch.action.get.GetResponse;
 import org.joda.time.DateTime;
@@ -36,74 +38,76 @@ import java.util.List;
 import java.util.Map;
 
 /**
- * Uses index and type in metadata to populate current document into datums
+ * Uses index and type in metadata to populate current document into datums.
  */
 public class DatumFromMetadataProcessor implements StreamsProcessor, Serializable {
 
-    private final static String STREAMS_ID = "DatumFromMetadataProcessor";
+  private static final String STREAMS_ID = "DatumFromMetadataProcessor";
 
-    private ElasticsearchClientManager elasticsearchClientManager;
-    private ElasticsearchReaderConfiguration config;
+  private ElasticsearchClientManager elasticsearchClientManager;
+  private ElasticsearchReaderConfiguration config;
 
-    public DatumFromMetadataProcessor() {
-        this.config = new ComponentConfigurator<>(ElasticsearchReaderConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("elasticsearch"));
-    }
+  public DatumFromMetadataProcessor() {
+    this.config = new ComponentConfigurator<>(ElasticsearchReaderConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("elasticsearch"));
+  }
 
-    public DatumFromMetadataProcessor(Config config) {
-        this.config = new ComponentConfigurator<>(ElasticsearchReaderConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("elasticsearch"));
-    }
+  public DatumFromMetadataProcessor(Config config) {
+    this.config = new ComponentConfigurator<>(ElasticsearchReaderConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("elasticsearch"));
+  }
 
-    public DatumFromMetadataProcessor(ElasticsearchReaderConfiguration config) {
-        this.config = config;
-    }
+  public DatumFromMetadataProcessor(ElasticsearchReaderConfiguration config) {
+    this.config = config;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        List<StreamsDatum> result = new ArrayList<>();
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    List<StreamsDatum> result = new ArrayList<>();
 
-        if(entry == null || entry.getMetadata() == null)
-            return result;
+    if (entry == null || entry.getMetadata() == null) {
+      return result;
+    }
 
-        Map<String, Object> metadata = entry.getMetadata();
+    Map<String, Object> metadata = entry.getMetadata();
 
-        String index = ElasticsearchMetadataUtil.getIndex(metadata, config);
-        String type = ElasticsearchMetadataUtil.getType(metadata, config);
-        String id = ElasticsearchMetadataUtil.getId(entry);
+    String index = ElasticsearchMetadataUtil.getIndex(metadata, config);
+    String type = ElasticsearchMetadataUtil.getType(metadata, config);
+    String id = ElasticsearchMetadataUtil.getId(entry);
 
-        GetRequestBuilder getRequestBuilder = elasticsearchClientManager.getClient().prepareGet(index, type, id);
-        getRequestBuilder.setFields("*", "_timestamp");
-        getRequestBuilder.setFetchSource(true);
-        GetResponse getResponse = getRequestBuilder.get();
+    GetRequestBuilder getRequestBuilder = elasticsearchClientManager.getClient().prepareGet(index, type, id);
+    getRequestBuilder.setFields("*", "_timestamp");
+    getRequestBuilder.setFetchSource(true);
+    GetResponse getResponse = getRequestBuilder.get();
 
-        if( getResponse == null || !getResponse.isExists() || getResponse.isSourceEmpty() )
-            return result;
+    if ( getResponse == null || !getResponse.isExists() || getResponse.isSourceEmpty() ) {
+      return result;
+    }
 
-        entry.setDocument(getResponse.getSource());
-        if( getResponse.getField("_timestamp") != null) {
-            DateTime timestamp = new DateTime(((Long) getResponse.getField("_timestamp").getValue()).longValue());
-            entry.setTimestamp(timestamp);
-        }
+    entry.setDocument(getResponse.getSource());
+    if ( getResponse.getField("_timestamp") != null) {
+      DateTime timestamp = new DateTime(((Long) getResponse.getField("_timestamp").getValue()).longValue());
+      entry.setTimestamp(timestamp);
+    }
 
-        result.add(entry);
+    result.add(entry);
 
-        return result;
-    }
+    return result;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        this.elasticsearchClientManager = new ElasticsearchClientManager(config);
+  @Override
+  public void prepare(Object configurationObject) {
+    this.elasticsearchClientManager = new ElasticsearchClientManager(config);
 
-    }
+  }
 
-    @Override
-    public void cleanUp() {
-        this.elasticsearchClientManager.getClient().close();
-    }
+  @Override
+  public void cleanUp() {
+    this.elasticsearchClientManager.getClient().close();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DocumentToMetadataProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DocumentToMetadataProcessor.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DocumentToMetadataProcessor.java
index 9a08654..2a64fbc 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DocumentToMetadataProcessor.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/DocumentToMetadataProcessor.java
@@ -18,13 +18,15 @@
 
 package org.apache.streams.elasticsearch.processor;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.elasticsearch.ElasticsearchMetadataUtil;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -36,60 +38,62 @@ import java.util.Map;
 /**
  * Moves a json representation of metadata out of the document to the metadata field.
  *
+ * <p>
  * This is useful if you have a list of document metadata references in the document field,
  * for example loaded from a file, and need them in the metadata field.
  */
 public class DocumentToMetadataProcessor implements StreamsProcessor, Serializable {
 
-    private final static String STREAMS_ID = "DatumFromMetadataProcessor";
+  private static final String STREAMS_ID = "DatumFromMetadataProcessor";
 
-    private ObjectMapper mapper;
+  private ObjectMapper mapper;
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentToMetadataProcessor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(DocumentToMetadataProcessor.class);
 
-    public DocumentToMetadataProcessor() {
-    }
+  public DocumentToMetadataProcessor() {
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        List<StreamsDatum> result = new ArrayList<>();
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    List<StreamsDatum> result = new ArrayList<>();
 
-        Object object = entry.getDocument();
-        ObjectNode metadataObjectNode;
-        try {
-            String docAsJson = (object instanceof String) ? object.toString() : mapper.writeValueAsString(object);
-            metadataObjectNode = mapper.readValue(docAsJson, ObjectNode.class);
-        } catch (Throwable e) {
-            LOGGER.warn("Exception: %s", e.getMessage());
-            return result;
-        }
+    Object object = entry.getDocument();
+    ObjectNode metadataObjectNode;
+    try {
+      String docAsJson = (object instanceof String) ? object.toString() : mapper.writeValueAsString(object);
+      metadataObjectNode = mapper.readValue(docAsJson, ObjectNode.class);
+    } catch (Throwable ex) {
+      LOGGER.warn("Exception: %s", ex.getMessage());
+      return result;
+    }
 
-        Map<String, Object> metadata = ElasticsearchMetadataUtil.asMap(metadataObjectNode);
+    Map<String, Object> metadata = ElasticsearchMetadataUtil.asMap(metadataObjectNode);
 
-        if(metadata == null)
-            return result;
+    if ( metadata == null ) {
+      return result;
+    }
 
-        entry.setMetadata(metadata);
+    entry.setMetadata(metadata);
 
-        result.add(entry);
+    result.add(entry);
 
-        return result;
-    }
+    return result;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        mapper = StreamsJacksonMapper.getInstance();
-        mapper.registerModule(new JsonOrgModule());
-    }
+  @Override
+  public void prepare(Object configurationObject) {
+    mapper = StreamsJacksonMapper.getInstance();
+    mapper.registerModule(new JsonOrgModule());
+  }
 
-    @Override
-    public void cleanUp() {
-        mapper = null;
-    }
+  @Override
+  public void cleanUp() {
+    mapper = null;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/MetadataFromDocumentProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/MetadataFromDocumentProcessor.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/MetadataFromDocumentProcessor.java
index e9aa900..721ad42 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/MetadataFromDocumentProcessor.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/MetadataFromDocumentProcessor.java
@@ -18,16 +18,17 @@
 
 package org.apache.streams.elasticsearch.processor;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProcessor;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
 import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.elasticsearch.ElasticsearchMetadataUtil;
-import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -39,82 +40,94 @@ import java.util.Map;
 /**
  * Examines document to derive metadata fields.
  *
+ * <p>
  * This is useful if you have a document with a populated 'id', and 'verb' or 'objectType' fields you want
  * to use as _id and _type respectively when indexing.
  */
 public class MetadataFromDocumentProcessor implements StreamsProcessor, Serializable {
 
-    public final static String STREAMS_ID = "MetadataFromDocumentProcessor";
+  public static final String STREAMS_ID = "MetadataFromDocumentProcessor";
 
-    private ObjectMapper mapper;
+  private ObjectMapper mapper;
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(MetadataFromDocumentProcessor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(MetadataFromDocumentProcessor.class);
 
-    public MetadataFromDocumentProcessor() {
-    }
+  public MetadataFromDocumentProcessor() {
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+
+    if ( mapper == null ) {
+      mapper = StreamsJacksonMapper.getInstance();
     }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-
-        if( mapper == null ) mapper = StreamsJacksonMapper.getInstance();
-
-        List<StreamsDatum> result = Lists.newArrayList();
-
-        Map<String, Object> metadata = entry.getMetadata();
-        if( metadata == null ) metadata = Maps.newHashMap();
-
-        String id = null;
-        String type = null;
-
-        Object document = entry.getDocument();
-        ObjectNode objectNode = null;
-        if( document instanceof String) {
-            try {
-                objectNode = mapper.readValue((String) document, ObjectNode.class);
-            } catch (IOException e) {
-                LOGGER.warn("Can't deserialize to determine metadata", e);
-            }
-        } else {
-            try {
-                objectNode = mapper.convertValue(document, ObjectNode.class);
-            } catch (Exception e) {
-                LOGGER.warn("Can't deserialize to determine metadata", e);
-            }
-        }
-        if( objectNode != null ) {
-            if (objectNode.has("id"))
-                id = objectNode.get("id").textValue();
-            if (objectNode.has("verb"))
-                type = objectNode.get("verb").textValue();
-            if (objectNode.has("objectType"))
-                type = objectNode.get("objectType").textValue();
-        }
-
-        if( !Strings.isNullOrEmpty(id) ) metadata.put("id", id);
-        if( !Strings.isNullOrEmpty(type) ) metadata.put("type", type);
-
-        entry.setId(id);
-        entry.setMetadata(metadata);
-
-        result.add(entry);
-
-        return result;
+    List<StreamsDatum> result = Lists.newArrayList();
+
+    Map<String, Object> metadata = entry.getMetadata();
+    if ( metadata == null ) {
+      metadata = Maps.newHashMap();
     }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        mapper = StreamsJacksonMapper.getInstance();
-        mapper.registerModule(new JsonOrgModule());
+    String id = null;
+    String type = null;
+
+    Object document = entry.getDocument();
+    ObjectNode objectNode = null;
+    if ( document instanceof String) {
+      try {
+        objectNode = mapper.readValue((String) document, ObjectNode.class);
+      } catch (IOException ex) {
+        LOGGER.warn("Can't deserialize to determine metadata", ex);
+      }
+    } else {
+      try {
+        objectNode = mapper.convertValue(document, ObjectNode.class);
+      } catch (Exception ex) {
+        LOGGER.warn("Can't deserialize to determine metadata", ex);
+      }
+    }
+    if ( objectNode != null ) {
+      if (objectNode.has("id")) {
+        id = objectNode.get("id").textValue();
+      }
+      if (objectNode.has("verb")) {
+        type = objectNode.get("verb").textValue();
+      }
+      if (objectNode.has("objectType")) {
+        type = objectNode.get("objectType").textValue();
+      }
     }
 
-    @Override
-    public void cleanUp() {
-        mapper = null;
+    if ( !Strings.isNullOrEmpty(id) ) {
+      metadata.put("id", id);
     }
+    if ( !Strings.isNullOrEmpty(type) ) {
+      metadata.put("type", type);
+    }
+
+    entry.setId(id);
+    entry.setMetadata(metadata);
+
+    result.add(entry);
+
+    return result;
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    mapper = StreamsJacksonMapper.getInstance();
+    mapper.registerModule(new JsonOrgModule());
+  }
+
+  @Override
+  public void cleanUp() {
+    mapper = null;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/PercolateTagProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/PercolateTagProcessor.java b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/PercolateTagProcessor.java
index f37527a..69394ee 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/PercolateTagProcessor.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/main/java/org/apache/streams/elasticsearch/processor/PercolateTagProcessor.java
@@ -18,13 +18,6 @@
 
 package org.apache.streams.elasticsearch.processor;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ArrayNode;
-import com.fasterxml.jackson.databind.node.JsonNodeFactory;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.elasticsearch.ElasticsearchClientManager;
@@ -33,6 +26,15 @@ import org.apache.streams.elasticsearch.ElasticsearchWriterConfiguration;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.extensions.ExtensionUtil;
 import org.apache.streams.pojo.json.Activity;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
 import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
@@ -51,7 +53,11 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Queue;
+import java.util.Set;
 
 /**
  * References:
@@ -65,287 +71,304 @@ import java.util.*;
 
 public class PercolateTagProcessor implements StreamsProcessor {
 
-    public static final String STREAMS_ID = "PercolateTagProcessor";
-    private final static Logger LOGGER = LoggerFactory.getLogger(PercolateTagProcessor.class);
-    private final static String DEFAULT_PERCOLATE_FIELD = "_all";
-
-    private ObjectMapper mapper;
-
-    protected Queue<StreamsDatum> inQueue;
-    protected Queue<StreamsDatum> outQueue;
-
-    public String TAGS_EXTENSION = "tags";
-
-    private ElasticsearchWriterConfiguration config;
-    private ElasticsearchClientManager manager;
-    private BulkRequestBuilder bulkBuilder;
-    protected String usePercolateField;
-
-    public PercolateTagProcessor(ElasticsearchWriterConfiguration config) {
-        this(config, DEFAULT_PERCOLATE_FIELD);
+  public static final String STREAMS_ID = "PercolateTagProcessor";
+  private static final Logger LOGGER = LoggerFactory.getLogger(PercolateTagProcessor.class);
+  private static final String DEFAULT_PERCOLATE_FIELD = "_all";
+
+  private ObjectMapper mapper;
+
+  protected Queue<StreamsDatum> inQueue;
+  protected Queue<StreamsDatum> outQueue;
+
+  public static final String TAGS_EXTENSION = "tags";
+
+  private ElasticsearchWriterConfiguration config;
+  private ElasticsearchClientManager manager;
+  private BulkRequestBuilder bulkBuilder;
+  protected String usePercolateField;
+
+  public PercolateTagProcessor(ElasticsearchWriterConfiguration config) {
+    this(config, DEFAULT_PERCOLATE_FIELD);
+  }
+
+  public PercolateTagProcessor(ElasticsearchWriterConfiguration config, String defaultPercolateField) {
+    this.config = config;
+    this.usePercolateField = defaultPercolateField;
+  }
+
+  public ElasticsearchClientManager getManager() {
+    return manager;
+  }
+
+  public void setManager(ElasticsearchClientManager manager) {
+    this.manager = manager;
+  }
+
+  public ElasticsearchConfiguration getConfig() {
+    return config;
+  }
+
+  public void setConfig(ElasticsearchWriterConfiguration config) {
+    this.config = config;
+  }
+
+  public Queue<StreamsDatum> getProcessorOutputQueue() {
+    return outQueue;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+
+    List<StreamsDatum> result = Lists.newArrayList();
+
+    String json;
+    ObjectNode node;
+    // first check for valid json
+    if (entry.getDocument() instanceof String) {
+      json = (String) entry.getDocument();
+      try {
+        node = (ObjectNode) mapper.readTree(json);
+      } catch (IOException ex) {
+        ex.printStackTrace();
+        return null;
+      }
+    } else if (entry.getDocument() instanceof ObjectNode) {
+      node = (ObjectNode) entry.getDocument();
+      try {
+        json = mapper.writeValueAsString(node);
+      } catch (JsonProcessingException ex) {
+        LOGGER.warn("Invalid datum: ", node);
+        return null;
+      }
+    } else {
+      LOGGER.warn("Incompatible document type: ", entry.getDocument().getClass());
+      return null;
     }
 
-    public PercolateTagProcessor(ElasticsearchWriterConfiguration config, String defaultPercolateField) {
-        this.config = config;
-        this.usePercolateField = defaultPercolateField;
+    StringBuilder percolateRequestJson = new StringBuilder();
+    percolateRequestJson.append("{ \"doc\": ");
+    percolateRequestJson.append(json);
+    //percolateRequestJson.append("{ \"content\" : \"crazy good shit\" }");
+    percolateRequestJson.append("}");
+
+    PercolateRequestBuilder request;
+    PercolateResponse response;
+
+    try {
+      LOGGER.trace("Percolate request json: {}", percolateRequestJson.toString());
+      request = manager.getClient().preparePercolate().setIndices(config.getIndex()).setDocumentType(config.getType()).setSource(percolateRequestJson.toString());
+      LOGGER.trace("Percolate request: {}", mapper.writeValueAsString(request.request()));
+      response = request.execute().actionGet();
+      LOGGER.trace("Percolate response: {} matches", response.getMatches().length);
+    } catch (Exception ex) {
+      LOGGER.warn("Percolate exception: {}", ex.getMessage());
+      return null;
     }
 
-    public ElasticsearchClientManager getManager() {
-        return manager;
-    }
+    ArrayNode tagArray = JsonNodeFactory.instance.arrayNode();
 
-    public void setManager(ElasticsearchClientManager manager) {
-        this.manager = manager;
+    Iterator<PercolateResponse.Match> matchIterator = response.iterator();
+    while (matchIterator.hasNext()) {
+      tagArray.add(matchIterator.next().getId().string());
     }
 
-    public ElasticsearchConfiguration getConfig() {
-        return config;
-    }
+    LOGGER.trace("Percolate matches: {}", tagArray);
 
-    public void setConfig(ElasticsearchWriterConfiguration config) {
-        this.config = config;
-    }
+    Activity activity = mapper.convertValue(node, Activity.class);
 
-    public Queue<StreamsDatum> getProcessorOutputQueue() {
-        return outQueue;
-    }
+    appendMatches(tagArray, activity);
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+    entry.setDocument(activity);
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-
-        List<StreamsDatum> result = Lists.newArrayList();
-
-        String json;
-        ObjectNode node;
-        // first check for valid json
-        if (entry.getDocument() instanceof String) {
-            json = (String) entry.getDocument();
-            try {
-                node = (ObjectNode) mapper.readTree(json);
-            } catch (IOException e) {
-                e.printStackTrace();
-                return null;
-            }
-        } else if (entry.getDocument() instanceof ObjectNode) {
-            node = (ObjectNode) entry.getDocument();
-            try {
-                json = mapper.writeValueAsString(node);
-            } catch (JsonProcessingException e) {
-                LOGGER.warn("Invalid datum: ", node);
-                return null;
-            }
-        } else {
-            LOGGER.warn("Incompatible document type: ", entry.getDocument().getClass());
-            return null;
-        }
-
-        StringBuilder percolateRequestJson = new StringBuilder();
-        percolateRequestJson.append("{ \"doc\": ");
-        percolateRequestJson.append(json);
-        //percolateRequestJson.append("{ \"content\" : \"crazy good shit\" }");
-        percolateRequestJson.append("}");
-
-        PercolateRequestBuilder request;
-        PercolateResponse response;
-
-        try {
-            LOGGER.trace("Percolate request json: {}", percolateRequestJson.toString());
-            request = manager.getClient().preparePercolate().setIndices(config.getIndex()).setDocumentType(config.getType()).setSource(percolateRequestJson.toString());
-            LOGGER.trace("Percolate request: {}", mapper.writeValueAsString(request.request()));
-            response = request.execute().actionGet();
-            LOGGER.trace("Percolate response: {} matches", response.getMatches().length);
-        } catch (Exception e) {
-            LOGGER.warn("Percolate exception: {}", e.getMessage());
-            return null;
-        }
-
-        ArrayNode tagArray = JsonNodeFactory.instance.arrayNode();
-
-        Iterator<PercolateResponse.Match> matchIterator = response.iterator();
-        while(matchIterator.hasNext()) {
-            tagArray.add(matchIterator.next().getId().string());
-        }
-
-        LOGGER.trace("Percolate matches: {}", tagArray);
-
-        Activity activity = mapper.convertValue(node, Activity.class);
-
-        appendMatches(tagArray, activity);
-
-        entry.setDocument(activity);
-
-        result.add(entry);
-
-        return result;
+    result.add(entry);
 
-    }
+    return result;
 
-    protected void appendMatches(ArrayNode tagArray, Activity activity) {
+  }
 
-        ExtensionUtil.getInstance().addExtension(activity, TAGS_EXTENSION, tagArray);
+  protected void appendMatches(ArrayNode tagArray, Activity activity) {
 
-    }
+    ExtensionUtil.getInstance().addExtension(activity, TAGS_EXTENSION, tagArray);
 
-    @Override
-    public void prepare(Object o) {
+  }
 
-        mapper = StreamsJacksonMapper.getInstance();
+  @Override
+  public void prepare(Object configuration) {
 
-        Preconditions.checkNotNull(config);
+    mapper = StreamsJacksonMapper.getInstance();
 
-        manager = new ElasticsearchClientManager(config);
+    Preconditions.checkNotNull(config);
 
-        if( config.getTags() != null && config.getTags().getAdditionalProperties().size() > 0) {
-            // initial write tags to index
-            createIndexIfMissing(config.getIndex());
-            if( config.getReplaceTags() == true ) {
-                deleteOldQueries(config.getIndex());
-            }
-            for (String tag : config.getTags().getAdditionalProperties().keySet()) {
-                String query = (String) config.getTags().getAdditionalProperties().get(tag);
-                PercolateQueryBuilder queryBuilder = new PercolateQueryBuilder(tag, query, this.usePercolateField);
-                addPercolateRule(queryBuilder, config.getIndex());
-            }
-            bulkBuilder = manager.getClient().prepareBulk();
+    manager = new ElasticsearchClientManager(config);
 
-            if (writePercolateRules() == true)
-                LOGGER.info("wrote " + bulkBuilder.numberOfActions() + " tags to " + config.getIndex() + " _percolator");
-            else
-                LOGGER.error("FAILED writing " + bulkBuilder.numberOfActions() + " tags to " + config.getIndex() + " _percolator");
-        }
+    if ( config.getTags() != null && config.getTags().getAdditionalProperties().size() > 0) {
+      // initial write tags to index
+      createIndexIfMissing(config.getIndex());
+      if ( config.getReplaceTags() == true ) {
+        deleteOldQueries(config.getIndex());
+      }
+      for (String tag : config.getTags().getAdditionalProperties().keySet()) {
+        String query = (String) config.getTags().getAdditionalProperties().get(tag);
+        PercolateQueryBuilder queryBuilder = new PercolateQueryBuilder(tag, query, this.usePercolateField);
+        addPercolateRule(queryBuilder, config.getIndex());
+      }
+      bulkBuilder = manager.getClient().prepareBulk();
 
+      if (writePercolateRules() == true) {
+        LOGGER.info("wrote " + bulkBuilder.numberOfActions() + " tags to " + config.getIndex() + " _percolator");
+      } else {
+        LOGGER.error("FAILED writing " + bulkBuilder.numberOfActions() + " tags to " + config.getIndex() + " _percolator");
+      }
     }
 
-    @Override
-    public void cleanUp() {
-        if( config.getCleanupTags() == true )
-            deleteOldQueries(config.getIndex());
-        manager.getClient().close();
-    }
+  }
 
-    public int numOfPercolateRules() {
-        return this.bulkBuilder.numberOfActions();
+  @Override
+  public void cleanUp() {
+    if ( config.getCleanupTags() == true ) {
+      deleteOldQueries(config.getIndex());
     }
-
-    public void createIndexIfMissing(String indexName) {
-        if (!this.manager.getClient()
-                .admin()
-                .indices()
-                .exists(new IndicesExistsRequest(indexName))
-                .actionGet()
-                .isExists()) {
-            // It does not exist... So we are going to need to create the index.
-            // we are going to assume that the 'templates' that we have loaded into
-            // elasticsearch are sufficient to ensure the index is being created properly.
-            CreateIndexResponse response = this.manager.getClient().admin().indices().create(new CreateIndexRequest(indexName)).actionGet();
-
-            if (response.isAcknowledged()) {
-                LOGGER.info("Index {} did not exist. The index was automatically created from the stored ElasticSearch Templates.", indexName);
-            } else {
-                LOGGER.error("Index {} did not exist. While attempting to create the index from stored ElasticSearch Templates we were unable to get an acknowledgement.", indexName);
-                LOGGER.error("Error Message: {}", response.toString());
-                throw new RuntimeException("Unable to create index " + indexName);
-            }
-        }
+    manager.getClient().close();
+  }
+
+  public int numOfPercolateRules() {
+    return this.bulkBuilder.numberOfActions();
+  }
+
+  /**
+   * createIndexIfMissing.
+   * @param indexName indexName
+   */
+  public void createIndexIfMissing(String indexName) {
+    if (!this.manager.getClient()
+        .admin()
+        .indices()
+        .exists(new IndicesExistsRequest(indexName))
+        .actionGet()
+        .isExists()) {
+      // It does not exist... So we are going to need to create the index.
+      // we are going to assume that the 'templates' that we have loaded into
+      // elasticsearch are sufficient to ensure the index is being created properly.
+      CreateIndexResponse response = this.manager.getClient().admin().indices().create(new CreateIndexRequest(indexName)).actionGet();
+
+      if (response.isAcknowledged()) {
+        LOGGER.info("Index {} did not exist. The index was automatically created from the stored ElasticSearch Templates.", indexName);
+      } else {
+        LOGGER.error("Index {} did not exist. While attempting to create the index from stored ElasticSearch Templates we were unable to get an acknowledgement.", indexName);
+        LOGGER.error("Error Message: {}", response.toString());
+        throw new RuntimeException("Unable to create index " + indexName);
+      }
     }
-
-    public void addPercolateRule(PercolateQueryBuilder builder, String index) {
-        this.bulkBuilder.add(manager.getClient().prepareIndex(index, ".percolator", builder.getId())
-                .setSource(builder.getSource()));
+  }
+
+  public void addPercolateRule(PercolateQueryBuilder builder, String index) {
+    this.bulkBuilder.add(manager.getClient().prepareIndex(index, ".percolator", builder.getId())
+        .setSource(builder.getSource()));
+  }
+
+  /**
+   *
+   * @return returns true if all rules were added. False indicates one or more rules have failed.
+   */
+  public boolean writePercolateRules() {
+    if (this.numOfPercolateRules() < 0) {
+      throw new RuntimeException("No Rules Have been added!");
     }
-
-    /**
-     *
-     * @return returns true if all rules were addded. False indicates one or more rules have failed.
-     */
-    public boolean writePercolateRules() {
-        if(this.numOfPercolateRules() < 0) {
-            throw new RuntimeException("No Rules Have been added!");
-        }
-        BulkResponse response = this.bulkBuilder.execute().actionGet();
-        for(BulkItemResponse r : response.getItems()) {
-            if(r.isFailed()) {
-                LOGGER.error(r.getId()+"\t"+r.getFailureMessage());
-            }
-        }
-        return !response.hasFailures();
+    BulkResponse response = this.bulkBuilder.execute().actionGet();
+    for (BulkItemResponse r : response.getItems()) {
+      if (r.isFailed()) {
+        LOGGER.error(r.getId() + "\t" + r.getFailureMessage());
+      }
     }
-
-    /**
-     *
-     * @param ids
-     * @param index
-     * @return  Returns true if all of the old tags were removed. False indicates one or more tags were not removed.
-     */
-    public boolean removeOldTags(Set<String> ids, String index) {
-        if(ids.size() == 0) {
-            return false;
-        }
-        BulkRequestBuilder bulk = manager.getClient().prepareBulk();
-        for(String id : ids) {
-            bulk.add(manager.getClient().prepareDelete("_percolator", index, id));
-        }
-        return !bulk.execute().actionGet().hasFailures();
+    return !response.hasFailures();
+  }
+
+  /**
+   * Attempt to removeOldTags.
+   * @param ids ids
+   * @param index index
+   * @return Returns true if all of the old tags were removed. False indicates one or more tags were not removed.
+   */
+  public boolean removeOldTags(Set<String> ids, String index) {
+    if (ids.size() == 0) {
+      return false;
     }
-
-    public Set<String> getActivePercolateTags(String index) {
-        Set<String> tags = new HashSet<String>();
-        SearchRequestBuilder searchBuilder = manager.getClient().prepareSearch("*").setIndices(index).setTypes(".percolator").setSize(1000);
-        SearchResponse response = searchBuilder.setQuery(QueryBuilders.matchAllQuery()).execute().actionGet();
-        SearchHits hits = response.getHits();
-        for(SearchHit hit : hits.getHits()) {
-            tags.add(hit.id());
-        }
-        return tags;
+    BulkRequestBuilder bulk = manager.getClient().prepareBulk();
+    for (String id : ids) {
+      bulk.add(manager.getClient().prepareDelete("_percolator", index, id));
     }
-
-    /**
-     *
-     * @param index
-     * @return
-     */
-    public boolean deleteOldQueries(String index) {
-        Set<String> tags = getActivePercolateTags(index);
-        if(tags.size() == 0) {
-            LOGGER.warn("No active tags were found in _percolator for index : {}", index);
-            return false;
-        }
-        LOGGER.info("Deleting {} tags.", tags.size());
-        BulkRequestBuilder bulk = manager.getClient().prepareBulk();
-        for(String tag : tags) {
-            bulk.add(manager.getClient().prepareDelete().setType(".percolator").setIndex(index).setId(tag));
-        }
-        BulkResponse response =bulk.execute().actionGet();
-        return !response.hasFailures();
+    return !bulk.execute().actionGet().hasFailures();
+  }
+
+  /**
+   * get active percolate tags.
+   * @param index index
+   * @return result
+   */
+  public Set<String> getActivePercolateTags(String index) {
+    Set<String> tags = new HashSet<String>();
+    SearchRequestBuilder searchBuilder = manager.getClient().prepareSearch("*").setIndices(index).setTypes(".percolator").setSize(1000);
+    SearchResponse response = searchBuilder.setQuery(QueryBuilders.matchAllQuery()).execute().actionGet();
+    SearchHits hits = response.getHits();
+    for (SearchHit hit : hits.getHits()) {
+      tags.add(hit.id());
     }
+    return tags;
+  }
+
+  /**
+   * delete old queries.
+   * @param index index
+   * @return result
+   */
+  public boolean deleteOldQueries(String index) {
+    Set<String> tags = getActivePercolateTags(index);
+    if (tags.size() == 0) {
+      LOGGER.warn("No active tags were found in _percolator for index : {}", index);
+      return false;
+    }
+    LOGGER.info("Deleting {} tags.", tags.size());
+    BulkRequestBuilder bulk = manager.getClient().prepareBulk();
+    for (String tag : tags) {
+      bulk.add(manager.getClient().prepareDelete().setType(".percolator").setIndex(index).setId(tag));
+    }
+    BulkResponse response = bulk.execute().actionGet();
+    return !response.hasFailures();
+  }
 
-    public static class PercolateQueryBuilder {
-
-        private QueryStringQueryBuilder queryBuilder;
-        private String id;
-
-        public PercolateQueryBuilder(String id, String query, String defaultPercolateField) {
-            this.id = id;
-            this.queryBuilder = new QueryStringQueryBuilder(query);
-            this.queryBuilder.defaultField(defaultPercolateField);
-        }
+  public static class PercolateQueryBuilder {
 
-        public String getId() {
-            return this.id;
-        }
+    private QueryStringQueryBuilder queryBuilder;
+    private String id;
 
-        public String getSource() {
-            return "{ \n\"query\" : "+this.queryBuilder.toString()+"\n}";
-        }
+    /**
+     * PercolateQueryBuilder constructor.
+     * @param id
+     * @param query
+     * @param defaultPercolateField
+     */
+    public PercolateQueryBuilder(String id, String query, String defaultPercolateField) {
+      this.id = id;
+      this.queryBuilder = new QueryStringQueryBuilder(query);
+      this.queryBuilder.defaultField(defaultPercolateField);
+    }
 
+    public String getId() {
+      return this.id;
     }
 
-    public enum FilterLevel {
-        MUST, SHOULD, MUST_NOT
+    public String getSource() {
+      return "{ \n\"query\" : " + this.queryBuilder.toString() + "\n}";
     }
+
+  }
+
+  public enum FilterLevel {
+    MUST, SHOULD, MUST_NOT
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/processor/PercolateTagProcessorTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/processor/PercolateTagProcessorTest.java b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/processor/PercolateTagProcessorTest.java
index f0d9c90..e252901 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/processor/PercolateTagProcessorTest.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/processor/PercolateTagProcessorTest.java
@@ -22,25 +22,30 @@ import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+/**
+ * Unit Test for
+ * @see org.apache.streams.elasticsearch.processor.PercolateTagProcessor
+ */
 public class PercolateTagProcessorTest {
-    private final String id = "test_id";
-    private final String query = "test_query";
-    private final String defaultPercolateField = "activity.content";
-
-    private final String expectedResults = "{ \n" +
-            "\"query\" : {\n" +
-            "  \"query_string\" : {\n" +
-            "    \"query\" : \"test_query\",\n" +
-            "    \"default_field\" : \"activity.content\"\n" +
-            "  }\n" +
-            "}\n" +
-            "}";
-
-    @Test
-    public void percolateTagProcessorQueryBuilderTest() {
-        PercolateTagProcessor.PercolateQueryBuilder percolateQueryBuilder = new PercolateTagProcessor.PercolateQueryBuilder(id, query, defaultPercolateField);
-
-        assertEquals(id, percolateQueryBuilder.getId());
+
+  private final String id = "test_id";
+  private final String query = "test_query";
+  private final String defaultPercolateField = "activity.content";
+
+  private final String expectedResults = "{ \n" +
+      "\"query\" : {\n" +
+      "  \"query_string\" : {\n" +
+      "    \"query\" : \"test_query\",\n" +
+      "    \"default_field\" : \"activity.content\"\n" +
+      "  }\n" +
+      "}\n" +
+      "}";
+
+  @Test
+  public void percolateTagProcessorQueryBuilderTest() {
+    PercolateTagProcessor.PercolateQueryBuilder percolateQueryBuilder = new PercolateTagProcessor.PercolateQueryBuilder(id, query, defaultPercolateField);
+
+    assertEquals(id, percolateQueryBuilder.getId());
 //        assertEquals(expectedResults, percolateQueryBuilder.getSource());
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/DatumFromMetadataProcessorIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/DatumFromMetadataProcessorIT.java b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/DatumFromMetadataProcessorIT.java
index c81d183..caa0b8d 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/DatumFromMetadataProcessorIT.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/DatumFromMetadataProcessorIT.java
@@ -18,16 +18,18 @@
 
 package org.apache.streams.elasticsearch.test;
 
-import com.google.common.collect.Maps;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigParseOptions;
-import org.apache.commons.lang.SerializationUtils;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.elasticsearch.ElasticsearchClientManager;
 import org.apache.streams.elasticsearch.ElasticsearchReaderConfiguration;
 import org.apache.streams.elasticsearch.processor.DatumFromMetadataProcessor;
+
+import com.google.common.collect.Maps;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+
+import org.apache.commons.lang.SerializationUtils;
 import org.elasticsearch.client.Client;
 import org.junit.Assert;
 import org.junit.Before;
@@ -37,57 +39,58 @@ import java.io.File;
 import java.util.Map;
 
 /**
- * Created by sblackmon on 10/20/14.
+ * Integration Test for
+ * @see org.apache.streams.elasticsearch.processor.DatumFromMetadataProcessor
  */
 public class DatumFromMetadataProcessorIT {
 
-    private ElasticsearchReaderConfiguration testConfiguration;
-    protected Client testClient;
+  private ElasticsearchReaderConfiguration testConfiguration;
+  protected Client testClient;
 
-    @Test
-    public void testSerializability() {
-        DatumFromMetadataProcessor processor = new DatumFromMetadataProcessor(testConfiguration);
+  @Test
+  public void testSerializability() {
+    DatumFromMetadataProcessor processor = new DatumFromMetadataProcessor(testConfiguration);
 
-        DatumFromMetadataProcessor clone = (DatumFromMetadataProcessor) SerializationUtils.clone(processor);
-    }
+    DatumFromMetadataProcessor clone = (DatumFromMetadataProcessor) SerializationUtils.clone(processor);
+  }
 
-    @Before
-    public void prepareTest() throws Exception {
+  @Before
+  public void prepareTest() throws Exception {
 
-        Config reference  = ConfigFactory.load();
-        File conf_file = new File("target/test-classes/DatumFromMetadataProcessorIT.conf");
-        assert(conf_file.exists());
-        Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
-        testConfiguration = new ComponentConfigurator<>(ElasticsearchReaderConfiguration.class).detectConfiguration(typesafe, "elasticsearch");
-        testClient = new ElasticsearchClientManager(testConfiguration).getClient();
+    Config reference  = ConfigFactory.load();
+    File conf_file = new File("target/test-classes/DatumFromMetadataProcessorIT.conf");
+    assert(conf_file.exists());
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+    testConfiguration = new ComponentConfigurator<>(ElasticsearchReaderConfiguration.class).detectConfiguration(typesafe, "elasticsearch");
+    testClient = new ElasticsearchClientManager(testConfiguration).getClient();
 
-    }
+  }
 
-    @Test
-    public void testDatumFromMetadataProcessor() {
+  @Test
+  public void testDatumFromMetadataProcessor() {
 
-        Map<String, Object> metadata = Maps.newHashMap();
+    Map<String, Object> metadata = Maps.newHashMap();
 
-        metadata.put("index", testConfiguration.getIndexes().get(0));
-        metadata.put("type", testConfiguration.getTypes().get(0));
-        metadata.put("id", "post");
+    metadata.put("index", testConfiguration.getIndexes().get(0));
+    metadata.put("type", testConfiguration.getTypes().get(0));
+    metadata.put("id", "post");
 
-        DatumFromMetadataProcessor processor = new DatumFromMetadataProcessor(testConfiguration);
+    DatumFromMetadataProcessor processor = new DatumFromMetadataProcessor(testConfiguration);
 
-        StreamsDatum testInput = new StreamsDatum(null);
+    StreamsDatum testInput = new StreamsDatum(null);
 
-        testInput.setMetadata(metadata);
+    testInput.setMetadata(metadata);
 
-        Assert.assertNull(testInput.document);
+    Assert.assertNull(testInput.document);
 
-        processor.prepare(null);
+    processor.prepare(null);
 
-        StreamsDatum testOutput = processor.process(testInput).get(0);
+    StreamsDatum testOutput = processor.process(testInput).get(0);
 
-        processor.cleanUp();
+    processor.cleanUp();
 
-        Assert.assertNotNull(testOutput.document);
+    Assert.assertNotNull(testOutput.document);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchITs.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchITs.java b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchITs.java
index 7c655db..b0e67a9 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchITs.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchITs.java
@@ -15,6 +15,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.elasticsearch.test;
 
 import org.junit.runner.RunWith;
@@ -28,7 +29,10 @@ import org.junit.runners.Suite;
         ElasticsearchParentChildUpdaterIT.class,
         DatumFromMetadataProcessorIT.class
 })
-
+/**
+ * Integration Test Suite for
+ * @see org.apache.streams.elasticsearch
+ */
 public class ElasticsearchITs {
     // the class remains empty,
     // used only as a holder for the above annotations

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchParentChildUpdaterIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchParentChildUpdaterIT.java b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchParentChildUpdaterIT.java
index 6344028..553a711 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchParentChildUpdaterIT.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchParentChildUpdaterIT.java
@@ -74,11 +74,13 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 
 /**
- * Created by sblackmon on 10/20/14.
+ * Integration Test for
+ * @see org.apache.streams.elasticsearch.ElasticsearchPersistUpdater
+ * using parent/child associated documents.
  */
 public class ElasticsearchParentChildUpdaterIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ElasticsearchParentChildUpdaterIT.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchParentChildUpdaterIT.class);
 
     private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchParentChildWriterIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchParentChildWriterIT.java b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchParentChildWriterIT.java
index 637fdfc..6b52ce5 100644
--- a/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchParentChildWriterIT.java
+++ b/streams-contrib/streams-persist-elasticsearch/src/test/java/org/apache/streams/elasticsearch/test/ElasticsearchParentChildWriterIT.java
@@ -66,11 +66,13 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 
 /**
- * Created by sblackmon on 10/20/14.
+ * Integration Test for
+ * @see org.apache.streams.elasticsearch.ElasticsearchPersistWriter
+ * using parent/child associated documents.
  */
 public class ElasticsearchParentChildWriterIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ElasticsearchParentChildWriterIT.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(ElasticsearchParentChildWriterIT.class);
 
     private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
 


[42/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
STREAMS-440: custom checkstyle.xml, address compliance

Squashed commit of the following:

commit bf329d31fd71a3e1fc21a76073876204ca806f88
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Wed Nov 23 09:57:31 2016 -0600

    STREAMS-440: retrieve checkstyle xml from streams-master site

commit 45e0edbcc7cfe755b520e04d2eab2fce3f28f0fb
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 19:57:17 2016 -0600

    STREAMS-440: streams-verbs

    reduce wc -l target/checkstyle_result.xml from 400 to 36

commit fb911c9653108289f00b253751dce4693e77f2d9
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 19:24:43 2016 -0600

    STREAMS-440: streams-util

    reduce wc -l target/checkstyle_result.xml from 1520 to 61

commit 887762372f97d60e2a4c1ba6ec4c55da07b81ffc
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 18:39:11 2016 -0600

    STREAMS-440: streams-schema-activitystreams2

    reduce wc -l target/checkstyle_result.xml from 78 to 6

commit f8dd9a935f063305994a75fb162af98008c92a32
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 18:36:44 2016 -0600

    STREAMS-440: streams-schema-activitystreams

    reduce wc -l target/checkstyle_result.xml from 35 to 5

commit ff50402e9e049b973f8db2eb947a892ada59fa73
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 18:34:00 2016 -0600

    STREAMS-440: streams-runtime-storm

commit bdcea2d43d04e595e15905ae604916f3b984bbf9
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 18:33:18 2016 -0600

    STREAMS-440: streams-runtime-pig

    reduce wc -l target/checkstyle_result.xml from 452 to 75

commit 010a2b8407594b156e2f94c472ebd8fe6b3e3f1f
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 18:30:32 2016 -0600

    STREAMS-440: streams-runtime-local

    reduce wc -l target/checkstyle_result.xml from 3997 to 908

commit 262657144cc57c1893f93b4340a8f791cd1a56c4
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 18:23:07 2016 -0600

    STREAMS-440: streams-runtime-dropwizard

    reduce wc -l target/checkstyle_result.xml from 311 to 19

commit 86890fc0f82106093010f1971175ed54179a0c58
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 18:13:35 2016 -0600

    STREAMS-440: streams-pojo-extensions

    reduce wc -l target/checkstyle_result.xml from 230 to 16

commit 5ff2a253b31ed774d7f9e09e7449ab7bbc19d5f6
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 18:00:37 2016 -0600

    STREAMS-440: streams-pojo

    reduce wc -l target/checkstyle_result.xml from 822 to 70

commit 10d0b8d25c469865a995448cf3433b382cf59ad3
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 17:23:36 2016 -0600

    STREAMS-440: streams-plugin-scala

    reduce wc -l target/checkstyle_result.xml from 517 to 22

commit c505110874ab097acbee5638690caed4fb353668
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 17:08:35 2016 -0600

    STREAMS-440: streams-plugin-pojo

    reduce wc -l target/checkstyle_result.xml to 19

commit f2757328f2d0db9b196ac9eb7baaecebcd9db918
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 16:58:06 2016 -0600

    STREAMS-440: streams-plugin-pig

    reduce wc -l target/checkstyle_result.xml from 422 to 20

commit 54232cd7a272f72a72f5f1ea27ffb429d0d16e8c
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 15:30:08 2016 -0600

    STREAMS-440: streams-plugin-hive

    reduce wc -l target/checkstyle_result.xml from 442 to 20

commit 89e6dbf402bd61a7628d4d760bab126b2a75cd30
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 15:14:53 2016 -0600

    STREAMS-440: streams-plugin-hbase

    reduce wc -l target/checkstyle_result.xml from 346 to 18

commit 6226f79ca23fe6ee99a8ceb9866fbc2219241c5e
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 14:57:15 2016 -0600

    STREAMS-440: streams-plugin-elasticsearch

    reduce wc -l target/checkstyle_result.xml from 522 to 23

commit 4dc32f145c3d88bcb879cc95e2d53a51badde02a
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 14:44:11 2016 -0600

    STREAMS-440: streams-plugin-cassandra

    reduce wc -l target/checkstyle_result.xml from 512 to 20

commit bebccb51b7bc90767dd768cc05453f596b032ad8
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 14:21:18 2016 -0600

    STREAMS-440: streams-monitoring

    reduce wc -l target/checkstyle_result.xml to 38

commit 9556e9840de1cfb2862fe2ba8e270cb23f268068
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 10:32:21 2016 -0600

    STREAMS-440: streams-core

    reduce wc -l target/checkstyle_result.xml from 317 to 33

commit f92579033eefe1f12fdb71cb231b5df82ab79d0b
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 10:12:49 2016 -0600

    STREAMS-440: streams-provider-youtube

    reduce wc -l target/checkstyle_result.xml from 1442 to 162

commit 29e32f7f24d0e4479e162de74bf76da55c9d6dc0
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 09:38:48 2016 -0600

    STREAMS-440: streams-provider-twitter

    reduce wc -l target/checkstyle_result.xml from 2715 to 85

commit 66bba5ced09f982d8e8c37d0f27dc5a3702e7197
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 09:37:55 2016 -0600

    STREAMS-440: streams-provider-sysomos

    reduce wc -l target/checkstyle_result.xml from 1087 to 36

commit 56f5ce34a96eb0743d81e6894984ee90d8d204e3
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Tue Nov 22 09:37:08 2016 -0600

    STREAMS-440: streams-provider-rss

    reduce wc -l target/checkstyle_result.xml from 1261 to 34

commit ea4ab54ce118da5e46462b1e0867b18fb6440088
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Mon Nov 21 18:27:40 2016 -0600

    STREAMS-440: streams-provider-moreover

    reduce wc -l target/checkstyle_result.xml from 695 to 32

commit 91573f2cccc8a44ef9efd1e3856055727b5eecb4
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sun Nov 20 17:03:13 2016 -0600

    STREAMS-440: streams-provider-instagram

    reduce wc -l target/checkstyle-result.xml from 411 to 51

commit e05113a13994eb68b56ea8a87f0e09a7f6279c65
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sun Nov 20 14:05:03 2016 -0600

    STREAMS-440: google-gplus

    reduce wc -l target/checkstyle-result.xml to 199

commit fa6704eb9887283bdeb3b0f36544db32ba920a08
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sun Nov 20 14:04:15 2016 -0600

    STREAMS-440: google-gmail

    reduce wc -l target/checkstyle-result.xml to 61

commit 7b779df2b9d08de3d0ba6c92ec023d2fdd78dab0
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:56:05 2016 +0100

    STREAMS-440: streams-provider-facebook

    reduce wc -l target/checkstyle-result.xml from 1842 to 780

commit 8d861005797b13093ce4b39eb94dcbdad0124c07
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:43:13 2016 +0100

    STREAMS-440: streams-processor-regex

    reduce wc -l target/checkstyle-result.xml from 222 to 36

commit 29ee86a7db4444a47adb2dfba5e9bc6ffaa4d0c1
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:40:19 2016 +0100

    STREAMS-440: streams-processor-peoplepattern

    reduce wc -l target/checkstyle-result.xml from 67 to 9

commit 5e96ff4e686a4e206d31976b626d1d35cd88e251
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:39:08 2016 +0100

    STREAMS-440: streams-processor-json

    reduce wc -l target/checkstyle-result.xml from 242 to 31

commit 5adb1495ea54ccd69e24ffbe5cbc191ac60d75a3
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:38:30 2016 +0100

    STREAMS-440: streams-processor-jackson

    reduce wc -l target/checkstyle-result.xml from 127 to 17

commit 43a9a7dcb660488b07a19090afedfffd5f529416
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:37:39 2016 +0100

    STREAMS-440: streams-persist-mongo

    reduce wc -l target/checkstyle-result.xml to 37

commit 50e1390047edfa4daaa2089325cf408bf3d3872a
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:37:07 2016 +0100

    STREAMS-440: streams-persist-kafka

    reduces wc -l target/checkstyle-result.xml from 233 to 31

commit b4e59d2f68742679f3ccbb65dcda5de2feb03db7
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:36:26 2016 +0100

    STREAMS-440: streams-persist-hdfs

    decrease wc -l target/checkstyle-result.xml from 508 to 58

commit 7e2b49f32ff11d0b6b5f06b1e22385a8c4fedf22
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:34:41 2016 +0100

    STREAMS-440: streams-persist-hbase

     reduce wc -l target/checkstyle-result.xml from 204 to 20

commit 676be795634a5ef712362c35df915468d6732d8d
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:33:51 2016 +0100

    STREAMS-440: streams-persist-graph

    decrease wc -l checkstyle-result.xml from 664 to 65

commit 9afe2db595736f8f4497a36870f2dd757e946835
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:31:00 2016 +0100

    STREAMS-440: streams-persist-filebuffer

commit 953f4cbb609b9c6399691989b84690cb79afc43a
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:30:28 2016 +0100

    STREAMS-440: streams-persist-elasticsearch

    decrease wc -l checkstyle-result.xml from 1572 to 131

commit fa0d73e7569e02742f0be0bdcd4871d7c0f30931
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:28:55 2016 +0100

    STREAMS-440: streams-persist-console

    decrease wc -l checkstyle-result.xml from 114 to 12

commit f8210dddbd3de79065e1ed494a69df21a2e0ff13
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:28:04 2016 +0100

    STREAMS-440: streams-persist-s3

    decrease wc -l checkstyle-result.xml from 580 to 61

commit db47e801b2901d7b1720cf6dc45646bcf5373dae
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:26:49 2016 +0100

    STREAMS-440: streams-persist-kinesis

    decrease wc -l checkstyle-result.xml from 222 to 64

commit b3b75d4e7f84b7753309652d59fa46f134977c8b
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:25:12 2016 +0100

    STREAMS-440: streams-config

    reduce wc -l target/checkstyle-result.xml from 85 to 60

commit cfa4f706f9d184aaf5e26b359b35068e2b57fc0f
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:23:16 2016 +0100

    STREAMS-440: streams-http

    decrease wc -l checkstyle-result.xml from 742 to 65

commit fe649be82ab3d81cae61e0451858d6372d3a8780
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:22:25 2016 +0100

    STREAMS-440: streams-filters

    decrease wc -l checkstyle-result.xml from 84 to 13

commit 2048f43dcff52621e16a1969efce92ee1bb7545f
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Date:   Sat Nov 19 18:14:06 2016 +0100

    STREAMS-440: streams-converters

    decrease wc -l checkstyle-result.xml from 928 to 122


Project: http://git-wip-us.apache.org/repos/asf/incubator-streams/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-streams/commit/5dffd5c3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-streams/tree/5dffd5c3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-streams/diff/5dffd5c3

Branch: refs/heads/master
Commit: 5dffd5c32d0d150727a39104d428b21b52c911d4
Parents: a726a67
Author: Steve Blackmon @steveblackmon <sb...@apache.org>
Authored: Fri Nov 25 14:24:04 2016 -0600
Committer: Steve Blackmon @steveblackmon <sb...@apache.org>
Committed: Fri Nov 25 14:24:04 2016 -0600

----------------------------------------------------------------------
 pom.xml                                         |  66 ++
 .../converter/ActivityConverterProcessor.java   |  90 +-
 .../converter/ActivityConverterUtil.java        | 311 ++++---
 .../ActivityObjectConverterProcessor.java       |  87 +-
 .../converter/ActivityObjectConverterUtil.java  | 325 +++----
 .../converter/BaseDocumentClassifier.java       |  85 +-
 .../BaseObjectNodeActivityConverter.java        | 103 ++-
 .../BaseObjectNodeActivityObjectConverter.java  |  62 +-
 .../converter/BaseStringActivityConverter.java  | 105 ++-
 .../BaseStringActivityObjectConverter.java      |  57 +-
 .../streams/converter/FieldConstants.java       |  12 +-
 .../converter/HoconConverterProcessor.java      |  80 +-
 .../streams/converter/HoconConverterUtil.java   | 177 ++--
 .../streams/converter/LineReadWriteUtil.java    | 352 ++++----
 .../converter/TypeConverterProcessor.java       |  86 +-
 .../streams/converter/TypeConverterUtil.java    |  79 +-
 .../filters/VerbDefinitionDropFilter.java       |  82 +-
 .../filters/VerbDefinitionKeepFilter.java       |  85 +-
 .../persist/SimpleHTTPPostPersistWriter.java    | 316 +++----
 .../http/processor/SimpleHTTPGetProcessor.java  | 381 ++++----
 .../http/processor/SimpleHTTPPostProcessor.java | 358 ++++----
 .../http/provider/SimpleHttpProvider.java       | 464 +++++-----
 .../java/SimpleHTTPPostPersistWriterTest.java   | 150 ++--
 .../streams/config/ComponentConfigurator.java   |  70 +-
 .../streams/config/StreamsConfigurator.java     |  79 +-
 .../config/test/ComponentConfiguratorTest.java  |   2 +-
 .../config/test/StreamsConfiguratorTest.java    |   2 +-
 .../amazon/kinesis/KinesisPersistReader.java    | 198 +++--
 .../kinesis/KinesisPersistReaderTask.java       | 116 +--
 .../amazon/kinesis/KinesisPersistWriter.java    | 134 +--
 .../streams/s3/S3ObjectInputStreamWrapper.java  | 230 ++---
 .../streams/s3/S3OutputStreamWrapper.java       | 195 ++--
 .../org/apache/streams/s3/S3PersistReader.java  | 304 +++----
 .../apache/streams/s3/S3PersistReaderTask.java  |  97 +-
 .../org/apache/streams/s3/S3PersistWriter.java  | 422 ++++-----
 .../streams/console/ConsolePersistReader.java   | 112 ++-
 .../streams/console/ConsolePersistWriter.java   |  71 +-
 .../console/ConsolePersistWriterTask.java       |  45 +-
 .../elasticsearch/ElasticsearchClient.java      |  27 +-
 .../ElasticsearchClientManager.java             | 260 +++---
 .../ElasticsearchMetadataUtil.java              | 205 +++--
 .../ElasticsearchPersistDeleter.java            | 118 +--
 .../ElasticsearchPersistReader.java             | 329 +++----
 .../ElasticsearchPersistUpdater.java            | 158 ++--
 .../ElasticsearchPersistWriter.java             | 888 ++++++++++---------
 .../elasticsearch/ElasticsearchQuery.java       | 380 ++++----
 .../DatumFromMetadataAsDocumentProcessor.java   | 141 +--
 .../processor/DatumFromMetadataProcessor.java   | 106 +--
 .../processor/DocumentToMetadataProcessor.java  |  84 +-
 .../MetadataFromDocumentProcessor.java          | 143 +--
 .../processor/PercolateTagProcessor.java        | 527 +++++------
 .../processor/PercolateTagProcessorTest.java    |  43 +-
 .../test/DatumFromMetadataProcessorIT.java      |  79 +-
 .../elasticsearch/test/ElasticsearchITs.java    |   6 +-
 .../test/ElasticsearchParentChildUpdaterIT.java |   6 +-
 .../test/ElasticsearchParentChildWriterIT.java  |   6 +-
 .../test/ElasticsearchPersistUpdaterIT.java     | 248 +++---
 .../test/ElasticsearchPersistWriterIT.java      | 113 +--
 .../test/TestMetadataFromDocumentProcessor.java | 138 ++-
 .../filebuffer/FileBufferPersistReader.java     | 210 ++---
 .../filebuffer/FileBufferPersistWriter.java     | 114 +--
 .../streams/graph/GraphHttpPersistWriter.java   | 335 +++----
 .../apache/streams/graph/GraphVertexReader.java | 112 +--
 .../apache/streams/graph/HttpGraphHelper.java   |   5 +-
 .../apache/streams/graph/QueryGraphHelper.java  |  15 +-
 .../streams/graph/neo4j/BinaryGraphHelper.java  | 109 ---
 .../streams/graph/neo4j/CypherGraphHelper.java  | 210 -----
 .../graph/neo4j/CypherQueryGraphHelper.java     | 302 ++++---
 .../graph/neo4j/Neo4jHttpGraphHelper.java       |  59 +-
 .../graph/test/TestCypherQueryGraphHelper.java  | 117 +--
 .../graph/test/TestNeo4jHttpVertexReader.java   |  56 +-
 .../streams/hbase/HbasePersistWriter.java       | 338 +++----
 .../streams/hbase/HbasePersistWriterTask.java   |  49 +-
 .../org/apache/streams/hdfs/HdfsConstants.java  |  10 +-
 .../streams/hdfs/WebHdfsPersistReader.java      | 448 +++++-----
 .../streams/hdfs/WebHdfsPersistReaderTask.java  | 138 +--
 .../streams/hdfs/WebHdfsPersistWriter.java      | 481 +++++-----
 .../streams/hdfs/WebHdfsPersistWriterTask.java  |  47 +-
 .../hdfs/test/HdfsPersistConfigTest.java        |   2 +-
 .../streams/hdfs/test/TestHdfsPersist.java      |   2 +-
 .../streams/kafka/KafkaPersistReader.java       | 202 +++--
 .../streams/kafka/KafkaPersistReaderTask.java   |  58 +-
 .../streams/kafka/KafkaPersistWriter.java       | 160 ++--
 .../streams/kafka/KafkaPersistWriterTask.java   |  49 +-
 .../streams/kafka/StreamsPartitioner.java       |  41 -
 .../streams/mongo/MongoPersistReader.java       | 358 ++++----
 .../streams/mongo/MongoPersistWriter.java       | 354 ++++----
 .../streams/mongo/test/MongoPersistIT.java      |  94 +-
 .../CleanAdditionalPropertiesProcessor.java     |  82 +-
 .../org/apache/streams/jackson/JsonUtil.java    | 167 ++++
 .../streams/jackson/TypeConverterProcessor.java | 175 ++--
 .../test/TypeConverterProcessorTest.java        | 112 +--
 .../apache/streams/json/JsonPathExtractor.java  | 189 ++--
 .../org/apache/streams/json/JsonPathFilter.java | 222 ++---
 .../json/test/JsonPathExtractorTest.java        | 108 +--
 .../peoplepattern/AccountTypeProcessor.java     |  70 +-
 .../peoplepattern/DemographicsProcessor.java    |  70 +-
 .../regex/AbstractRegexExtensionExtractor.java  | 166 ++--
 .../streams/regex/RegexHashtagExtractor.java    |  35 +-
 .../streams/regex/RegexMentionsExtractor.java   |  40 +-
 .../apache/streams/regex/RegexUrlExtractor.java |  72 +-
 .../org/apache/streams/regex/RegexUtils.java    | 115 +--
 .../regex/RegexHashtagExtractorTest.java        |   1 -
 .../regex/RegexMentionExtractorTest.java        |   1 -
 .../streams/regex/RegexUrlExtractorTest.java    |   1 -
 .../apache/streams/regex/RegexUtilsTest.java    |   2 -
 .../api/FacebookPageActivitySerializer.java     |  52 +-
 .../api/FacebookPostActivitySerializer.java     |  54 +-
 .../processor/FacebookTypeConverter.java        | 305 ++++---
 .../provider/FacebookDataCollector.java         | 203 +++--
 .../provider/FacebookEventClassifier.java       |  56 +-
 .../provider/FacebookFriendFeedProvider.java    | 435 ++++-----
 .../provider/FacebookFriendUpdatesProvider.java | 453 +++++-----
 .../facebook/provider/FacebookProvider.java     | 208 ++---
 .../FacebookUserInformationProvider.java        | 471 +++++-----
 .../provider/FacebookUserstreamProvider.java    | 484 +++++-----
 .../page/FacebookPageDataCollector.java         |  12 +-
 .../provider/page/FacebookPageProvider.java     |   7 +-
 .../pagefeed/FacebookPageFeedDataCollector.java | 178 ++--
 .../pagefeed/FacebookPageFeedProvider.java      | 137 +--
 .../serializer/FacebookActivityUtil.java        | 295 +++---
 .../FacebookStreamsPostSerializer.java          |  81 +-
 .../test/FacebookEventClassifierTest.java       |  57 +-
 .../streams/facebook/test/SimplePageTest.java   |  84 +-
 .../apache/streams/facebook/test/TestPage.java  | 240 ++---
 .../test/data/FacebookActivityActorSerDeIT.java |  48 +-
 .../test/data/FacebookActivitySerDeIT.java      |  54 +-
 .../facebook/test/data/FacebookPageSerDeIT.java |  56 +-
 .../facebook/test/data/FacebookPostSerDeIT.java |  80 +-
 .../test/providers/TestFacebookProvider.java    |  98 +-
 .../providers/page/FacebookPageProviderIT.java  |  58 +-
 .../pagefeed/FacebookPageFeedProviderIT.java    |  53 +-
 .../gmail/provider/GMailImapProviderTask.java   |  41 +-
 .../GMailMessageActivitySerializer.java         | 270 +++---
 .../google/gmail/provider/GMailProvider.java    | 210 ++---
 .../gmail/provider/GMailRssProviderTask.java    |  55 --
 .../gmail/test/GMailMessageSerDeTest.java       |  47 +-
 .../processor/GooglePlusCommentProcessor.java   | 113 +--
 .../processor/GooglePlusTypeConverter.java      | 188 ++--
 .../gplus/provider/AbstractGPlusProvider.java   | 366 ++++----
 .../gplus/provider/GPlusActivitySerializer.java |  60 +-
 .../gplus/provider/GPlusDataCollector.java      |  73 +-
 .../gplus/provider/GPlusEventProcessor.java     |  93 --
 .../provider/GPlusUserActivityCollector.java    | 177 ++--
 .../provider/GPlusUserActivityProvider.java     | 143 +--
 .../gplus/provider/GPlusUserDataCollector.java  | 109 +--
 .../gplus/provider/GPlusUserDataProvider.java   | 151 ++--
 .../util/GPlusActivityDeserializer.java         | 244 ++---
 .../util/GPlusCommentDeserializer.java          | 125 +--
 .../serializer/util/GPlusEventClassifier.java   |  58 +-
 .../util/GPlusPersonDeserializer.java           | 142 +--
 .../serializer/util/GooglePlusActivityUtil.java | 469 +++++-----
 .../google/gplus/GooglePlusCommentSerDeIT.java  | 137 +--
 .../google/gplus/GooglePlusPersonSerDeIT.java   | 117 +--
 .../processor/GooglePlusActivitySerDeIT.java    | 142 +--
 .../processor/GooglePlusTypeConverterTest.java  | 150 ++--
 .../provider/TestAbstractGPlusProvider.java     | 107 +--
 .../TestGPlusUserActivityCollector.java         | 435 ++++-----
 .../provider/TestGPlusUserDataCollector.java    | 190 ++--
 .../util/GPlusEventClassifierTest.java          |  69 +-
 .../providers/GPlusUserActivityProviderIT.java  |  52 +-
 .../test/providers/GPlusUserDataProviderIT.java |  54 +-
 .../processor/InstagramTypeConverter.java       |  95 +-
 .../provider/InstagramAbstractProvider.java     | 350 ++++----
 .../provider/InstagramDataCollector.java        | 198 ++---
 .../instagram/provider/InstagramOauthToken.java |  28 +-
 .../InstagramRecentMediaCollector.java          | 131 +--
 .../InstagramRecentMediaProvider.java           | 163 ++--
 .../userinfo/InstagramUserInfoCollector.java    |  96 +-
 .../userinfo/InstagramUserInfoProvider.java     | 169 ++--
 .../InstagramMediaFeedDataConverter.java        |  66 +-
 .../InstagramUserInfoDataConverter.java         |  87 +-
 .../serializer/util/InstagramActivityUtil.java  | 557 ++++++------
 .../data/InstagramMediaFeedDataConverterIT.java |  96 +-
 .../data/InstagramUserInfoDataConverterIT.java  | 103 ++-
 .../InstagramRecentMediaProviderIT.java         |  57 +-
 .../providers/InstagramUserInfoProviderIT.java  |  53 +-
 .../apache/streams/moreover/MoreoverClient.java | 135 +--
 .../MoreoverJsonActivitySerializer.java         | 104 +--
 .../streams/moreover/MoreoverProvider.java      | 266 +++---
 .../streams/moreover/MoreoverProviderTask.java  | 112 +--
 .../apache/streams/moreover/MoreoverResult.java | 283 +++---
 .../moreover/MoreoverResultSetWrapper.java      |  32 -
 .../apache/streams/moreover/MoreoverUtils.java  | 264 +++---
 .../moreover/MoreoverXmlActivitySerializer.java | 118 +--
 .../streams/moreover/MoreoverTestUtil.java      |  32 +-
 .../test/MoreoverJsonActivitySerializerIT.java  |  65 +-
 .../test/MoreoverXmlActivitySerializerIT.java   |  49 +-
 .../test/provider/MoreoverProviderIT.java       |  41 +-
 .../streams/rss/processor/RssTypeConverter.java |  69 +-
 .../rss/provider/RssEventClassifier.java        |  32 -
 .../streams/rss/provider/RssEventProcessor.java | 146 +--
 .../streams/rss/provider/RssStreamProvider.java | 331 ++++---
 .../rss/provider/RssStreamProviderTask.java     | 368 ++++----
 .../provider/perpetual/RssFeedScheduler.java    | 144 +--
 .../serializer/SyndEntryActivitySerializer.java | 353 ++++----
 .../rss/serializer/SyndEntrySerializer.java     | 482 +++++-----
 .../rss/provider/RssStreamProviderTaskIT.java   | 205 ++---
 .../rss/provider/RssStreamProviderTest.java     | 143 +--
 .../perpetual/RssFeedSchedulerTest.java         | 101 +--
 .../streams/rss/test/RssStreamProviderIT.java   | 116 ++-
 .../streams/rss/test/RssTypeConverterTest.java  |  13 +-
 .../rss/test/SyndEntryActivitySerializerIT.java | 143 +--
 .../streams/sysomos/SysomosException.java       |  56 +-
 .../SysomosBeatActivityConverter.java           | 197 ++--
 .../streams/sysomos/data/HeartbeatInfo.java     | 182 ++--
 .../sysomos/data/SysomosTagDefinition.java      | 114 +--
 .../sysomos/processor/SysomosTypeConverter.java |  51 +-
 .../provider/AbstractRequestBuilder.java        |  41 +-
 .../sysomos/provider/ContentRequestBuilder.java | 187 ++--
 .../sysomos/provider/RequestBuilder.java        | 119 +--
 .../streams/sysomos/provider/SysomosClient.java |  27 +-
 .../provider/SysomosHeartbeatStream.java        | 383 ++++----
 .../sysomos/provider/SysomosProvider.java       | 605 +++++++------
 .../streams/sysomos/util/SysomosUtils.java      |  83 +-
 .../com/sysomos/test/SysomosJsonSerDeIT.java    |  47 +-
 .../com/sysomos/test/SysomosXmlSerDeIT.java     |  96 +-
 .../test/provider/SysomosProviderIT.java        |  54 +-
 .../twitter/converter/StreamsTwitterMapper.java |  86 +-
 .../converter/TwitterDateTimeFormat.java        |  13 +-
 .../converter/TwitterDocumentClassifier.java    |  81 +-
 .../TwitterFollowActivityConverter.java         | 104 +--
 .../TwitterJsonDeleteActivityConverter.java     |  82 +-
 .../TwitterJsonRetweetActivityConverter.java    | 101 ++-
 .../TwitterJsonTweetActivityConverter.java      |  81 +-
 .../TwitterJsonUserActivityConverter.java       |  82 +-
 .../TwitterJsonUserActivityObjectConverter.java |  49 +-
 ...terJsonUserstreameventActivityConverter.java | 179 ++--
 .../converter/util/TwitterActivityUtil.java     | 603 +++++++------
 .../FetchAndReplaceTwitterProcessor.java        | 242 ++---
 .../processor/TwitterEventProcessor.java        |  83 --
 .../processor/TwitterProfileProcessor.java      | 140 ---
 .../twitter/processor/TwitterTypeConverter.java |   5 +-
 .../processor/TwitterUrlApiProcessor.java       |  80 +-
 .../twitter/provider/TwitterErrorHandler.java   | 197 ++--
 .../provider/TwitterEventClassifier.java        |  74 --
 .../provider/TwitterFollowingProvider.java      | 284 +++---
 .../provider/TwitterFollowingProviderTask.java  | 341 +++----
 .../twitter/provider/TwitterProviderUtil.java   |  34 +-
 .../twitter/provider/TwitterStreamHelper.java   | 108 +++
 .../provider/TwitterStreamProcessor.java        |  99 ---
 .../twitter/provider/TwitterStreamProvider.java | 510 +++++------
 .../provider/TwitterTimelineProvider.java       | 522 +++++------
 .../provider/TwitterTimelineProviderTask.java   | 135 +--
 .../TwitterUserInformationProvider.java         | 621 +++++++------
 .../test/data/TwitterObjectMapperIT.java        | 130 +--
 .../providers/TwitterFollowingProviderIT.java   |  53 +-
 .../test/providers/TwitterStreamProviderIT.java |  53 +-
 .../providers/TwitterTimelineProviderIT.java    |  53 +-
 .../TwitterUserInformationProviderIT.java       |  53 +-
 .../utils/TwitterActivityConvertersTest.java    | 161 ++--
 .../TwitterActivityObjectsConvertersTest.java   |  52 +-
 .../utils/TwitterDocumentClassifierTest.java    |  86 +-
 .../youtube/processor/YoutubeTypeConverter.java | 168 ++--
 .../provider/YoutubeChannelDataCollector.java   | 123 +--
 .../provider/YoutubeChannelProvider.java        | 156 ++--
 .../youtube/provider/YoutubeDataCollector.java  |  71 +-
 .../com/youtube/provider/YoutubeProvider.java   | 405 ++++-----
 .../provider/YoutubeUserActivityCollector.java  | 335 +++----
 .../provider/YoutubeUserActivityProvider.java   | 155 ++--
 .../youtube/serializer/YoutubeActivityUtil.java | 300 ++++---
 .../serializer/YoutubeChannelDeserializer.java  | 185 ++--
 .../serializer/YoutubeEventClassifier.java      |  54 +-
 .../serializer/YoutubeVideoDeserializer.java    | 152 ++--
 .../processor/YoutubeTypeConverterTest.java     | 121 +--
 .../YoutubeChannelDataCollectorTest.java        | 105 +--
 .../youtube/provider/YoutubeProviderTest.java   | 206 ++---
 .../YoutubeUserActivityCollectorTest.java       | 495 ++++++-----
 .../serializer/YoutubeEventClassifierTest.java  |  45 +-
 .../serializer/YoutubeVideoSerDeTest.java       | 125 +--
 .../providers/YoutubeChannelProviderIT.java     |  54 +-
 .../YoutubeUserActivityProviderIT.java          |  54 +-
 .../org/apache/streams/core/DatumStatus.java    |   9 +-
 .../streams/core/DatumStatusCountable.java      |   4 +-
 .../apache/streams/core/DatumStatusCounter.java | 163 ++--
 .../org/apache/streams/core/StreamBuilder.java  | 153 ++--
 .../org/apache/streams/core/StreamHandler.java  |  40 -
 .../org/apache/streams/core/StreamState.java    |  28 -
 .../org/apache/streams/core/StreamsDatum.java   | 262 +++---
 .../org/apache/streams/core/StreamsFilter.java  |  36 -
 .../apache/streams/core/StreamsOperation.java   |  34 +-
 .../streams/core/StreamsPersistReader.java      |  10 +-
 .../streams/core/StreamsPersistWriter.java      |  15 +-
 .../apache/streams/core/StreamsProcessor.java   |  16 +-
 .../apache/streams/core/StreamsProvider.java    |  65 +-
 .../apache/streams/core/StreamsResultSet.java   |  51 +-
 .../apache/streams/core/util/DatumUtils.java    |  49 +-
 .../jackson/DatumStatusCounterDeserializer.java |  73 +-
 .../jackson/MemoryUsageDeserializer.java        |  77 +-
 .../jackson/StreamsTaskCounterDeserializer.java | 100 ++-
 .../jackson/ThroughputQueueDeserializer.java    |  95 +-
 .../monitoring/persist/MessagePersister.java    |  17 +-
 .../persist/impl/BroadcastMessagePersister.java |  99 ++-
 .../impl/LogstashUdpMessagePersister.java       | 119 +--
 .../persist/impl/SLF4JMessagePersister.java     |  43 -
 .../persist/impl/Slf4jMessagePersister.java     |  49 +
 .../tasks/BroadcastMonitorThread.java           | 293 +++---
 .../jackson/MemoryUsageDeserializerTest.java    |  71 +-
 .../impl/BroadcastMessagePersisterTest.java     |  45 +-
 .../impl/LogstashUdpMessagePersisterTest.java   |  63 +-
 .../tasks/BroadcastMonitorThreadTest.java       |  78 +-
 .../StreamsCassandraGenerationConfig.java       | 101 ++-
 .../StreamsCassandraResourceGenerator.java      | 602 +++++++------
 .../StreamsCassandraResourceGeneratorMojo.java  |  74 +-
 ...treamsCassandraResourceGeneratorCLITest.java |  46 +-
 ...StreamsCassandraResourceGeneratorMojoIT.java |  70 +-
 .../StreamsCassandraResourceGeneratorTest.java  |  86 +-
 .../StreamsElasticsearchGenerationConfig.java   | 101 ++-
 .../StreamsElasticsearchResourceGenerator.java  | 614 +++++++------
 ...reamsElasticsearchResourceGeneratorMojo.java |  78 +-
 ...msElasticsearchResourceGeneratorCLITest.java |  36 +-
 ...amsElasticsearchResourceGeneratorMojoIT.java |  42 +-
 ...reamsElasticsearchResourceGeneratorTest.java | 150 ++--
 .../hbase/StreamsHbaseGenerationConfig.java     | 134 +--
 .../hbase/StreamsHbaseResourceGenerator.java    | 274 +++---
 .../StreamsHbaseResourceGeneratorMojo.java      |  84 +-
 .../StreamsHbaseResourceGeneratorCLITest.java   |  37 +-
 .../StreamsHbaseResourceGeneratorMojoIT.java    |  57 +-
 .../test/StreamsHbaseResourceGeneratorTest.java | 125 +--
 .../hive/StreamsHiveGenerationConfig.java       | 100 +--
 .../hive/StreamsHiveResourceGenerator.java      | 501 ++++++-----
 .../hive/StreamsHiveResourceGeneratorMojo.java  |  66 +-
 .../StreamsHiveResourceGeneratorCLITest.java    |  37 +-
 .../StreamsHiveResourceGeneratorMojoIT.java     |  57 +-
 .../test/StreamsHiveResourceGeneratorTest.java  | 130 ++-
 .../plugins/pig/StreamsPigGenerationConfig.java | 124 +--
 .../pig/StreamsPigResourceGenerator.java        | 491 +++++-----
 .../pig/StreamsPigResourceGeneratorMojo.java    |  66 +-
 .../src/site/markdown/index.md                  |   2 +-
 .../StreamsPigResourceGeneratorCLITest.java     |  35 +-
 .../test/StreamsPigResourceGeneratorMojoIT.java |  57 +-
 .../test/StreamsPigResourceGeneratorTest.java   | 160 ++--
 .../plugins/StreamsPojoGenerationConfig.java    | 161 ++--
 .../plugins/StreamsPojoSourceGenerator.java     | 108 ++-
 .../plugins/StreamsPojoSourceGeneratorMojo.java | 103 +--
 .../test/StreamsPojoSourceGeneratorCLITest.java |  37 +-
 .../test/StreamsPojoSourceGeneratorMojoIT.java  |  57 +-
 .../test/StreamsPojoSourceGeneratorTest.java    |  89 +-
 .../plugins/StreamsScalaGenerationConfig.java   |  51 +-
 .../plugins/StreamsScalaSourceGenerator.java    | 609 +++++++------
 .../StreamsScalaSourceGeneratorMojo.java        |  83 +-
 .../StreamsScalaSourceGeneratorCLITest.java     |  39 +-
 .../test/StreamsScalaSourceGeneratorMojoIT.java |  51 +-
 .../test/StreamsScalaSourceGeneratorTest.java   | 110 +--
 .../apache/streams/data/util/PropertyUtil.java  | 159 ++--
 .../streams/pojo/extensions/ExtensionUtil.java  | 259 +++---
 .../pojo/extensions/test/ExtensionUtilTest.java |  35 +-
 .../apache/streams/data/ActivityConverter.java  |  80 +-
 .../streams/data/ActivityObjectConverter.java   |  60 +-
 .../apache/streams/data/ActivitySerializer.java |  57 +-
 .../apache/streams/data/DocumentClassifier.java |  16 +-
 .../apache/streams/data/util/ActivityUtil.java  | 211 +++--
 .../org/apache/streams/data/util/JsonUtil.java  | 168 ----
 .../apache/streams/data/util/RFC3339Utils.java  | 325 +++----
 .../exceptions/ActivityConversionException.java |  26 +-
 .../ActivityDeserializerException.java          |  45 -
 .../exceptions/ActivitySerializerException.java |  33 +-
 .../jackson/StreamsDateTimeDeserializer.java    |  56 +-
 .../streams/jackson/StreamsDateTimeFormat.java  |   5 +-
 .../jackson/StreamsDateTimeSerializer.java      |  19 +-
 .../streams/jackson/StreamsJacksonMapper.java   | 198 +++--
 .../streams/jackson/StreamsJacksonModule.java   |  58 +-
 .../jackson/StreamsPeriodDeserializer.java      |  18 +-
 .../jackson/StreamsPeriodSerializer.java        |  22 +-
 .../streams/pojo/test/ActivitySerDeTest.java    | 107 +--
 .../pojo/test/CustomDateTimeFormatTest.java     |  65 +-
 .../streams/pojo/test/DateTimeSerDeTest.java    |  75 +-
 .../streams/pojo/test/RFC3339UtilsTest.java     | 372 ++++----
 .../dropwizard/GenericWebhookResource.java      | 302 ++++---
 .../dropwizard/StreamDropwizardBuilder.java     |  45 +-
 .../streams/dropwizard/StreamsApplication.java  | 200 ++---
 .../dropwizard/StreamsDropwizardModule.java     |  32 +-
 .../test/GenericWebhookResourceTest.java        |  96 +-
 .../dropwizard/test/StreamsApplicationIT.java   |  22 +-
 .../local/builders/InvalidStreamException.java  |  24 +-
 .../local/builders/LocalStreamBuilder.java      | 867 +++++++++---------
 .../streams/local/builders/StreamComponent.java | 479 +++++-----
 .../local/counters/DatumStatusCounter.java      |  85 +-
 .../counters/DatumStatusCounterMXBean.java      |  30 +-
 .../local/counters/StreamsTaskCounter.java      | 256 +++---
 .../counters/StreamsTaskCounterMXBean.java      |  74 +-
 ...amOnUnhandleThrowableThreadPoolExecutor.java |  57 +-
 .../streams/local/queues/ThroughputQueue.java   | 828 ++++++++---------
 .../local/queues/ThroughputQueueMXBean.java     |  66 +-
 .../streams/local/tasks/BaseStreamsTask.java    | 318 +++----
 .../tasks/LocalStreamProcessMonitorThread.java  | 108 +--
 .../tasks/StatusCounterMonitorRunnable.java     |   4 +-
 .../local/tasks/StatusCounterMonitorThread.java |  81 +-
 .../streams/local/tasks/StreamsMergeTask.java   |  88 +-
 .../local/tasks/StreamsPersistWriterTask.java   | 269 +++---
 .../local/tasks/StreamsProcessorTask.java       | 270 +++---
 .../local/tasks/StreamsProviderTask.java        | 389 ++++----
 .../apache/streams/local/tasks/StreamsTask.java |  82 +-
 .../local/builders/LocalStreamBuilderTest.java  | 615 ++++++-------
 .../local/builders/ToyLocalBuilderExample.java  |  22 +-
 .../local/counters/DatumStatusCounterTest.java  | 170 ++--
 .../local/counters/StreamsTaskCounterTest.java  | 234 ++---
 ...nhandledThrowableThreadPoolExecutorTest.java | 165 ++--
 .../queues/ThroughputQueueMultiThreadTest.java  | 487 +++++-----
 .../queues/ThroughputQueueSingleThreadTest.java | 407 ++++-----
 .../streams/local/tasks/BasicTasksTest.java     | 499 +++++------
 .../local/tasks/StreamsProviderTaskTest.java    | 226 ++---
 .../test/processors/DoNothingProcessor.java     |  45 +-
 .../PassthroughDatumCounterProcessor.java       | 127 +--
 .../local/test/processors/SlowProcessor.java    |  41 +-
 .../test/providers/EmptyResultSetProvider.java  |  67 +-
 .../test/providers/NumericMessageProvider.java  | 138 ++-
 .../local/test/writer/DatumCounterWriter.java   | 139 +--
 .../local/test/writer/DoNothingWriter.java      |  31 +-
 .../local/test/writer/SystemOutWriter.java      |  33 +-
 .../component/ExpectedDatumsPersistWriter.java  |  71 +-
 .../test/component/FileReaderProvider.java      | 117 +--
 .../test/component/StreamsDatumConverter.java   |   2 +-
 .../component/StringToDocumentConverter.java    |   8 +-
 .../tests/TestComponentsLocalStream.java        |  31 +-
 .../tests/TestExpectedDatumsPersitWriter.java   |  48 +-
 .../component/tests/TestFileReaderProvider.java |  44 +-
 .../streams/pig/StreamsComponentFactory.java    |  57 +-
 .../apache/streams/pig/StreamsPigBuilder.java   |  88 --
 .../streams/pig/StreamsProcessDatumExec.java    | 226 ++---
 .../streams/pig/StreamsProcessDocumentExec.java | 100 +--
 .../org/apache/streams/pig/StreamsStorage.java  |  30 -
 .../streams/pig/test/AppendStringProcessor.java |  53 +-
 .../streams/pig/test/CopyThriceProcessor.java   |  51 +-
 .../streams/pig/test/DoNothingProcessor.java    |  49 +-
 .../streams/pig/test/PigConverterTest.java      |  35 +-
 .../streams/pig/test/PigProcessDatumTest.java   |  84 +-
 .../pig/test/PigProcessDocumentTest.java        | 113 +--
 streams-runtimes/streams-runtime-storm/pom.xml  | 124 ---
 .../test/SchemaValidationTest.java              |  65 +-
 .../activitystreams/test/ExamplesSerDeIT.java   | 189 ++--
 .../org/apache/streams/util/ComponentUtils.java | 178 ++--
 .../java/org/apache/streams/util/DateUtil.java  | 192 ----
 .../java/org/apache/streams/util/GuidUtils.java |  28 +-
 .../apache/streams/util/SerializationUtil.java  |  97 +-
 .../backoff/AbstractBackOffStrategy.java        | 101 ++-
 .../api/requests/backoff/BackOffException.java  |  84 +-
 .../api/requests/backoff/BackOffStrategy.java   |  24 +-
 .../impl/ConstantTimeBackOffStrategy.java       |  40 +-
 .../impl/ExponentialBackOffStrategy.java        |  41 +-
 .../backoff/impl/LinearTimeBackOffStrategy.java |  21 +-
 .../util/oauth/tokens/AbstractOauthToken.java   |  23 +-
 .../tokens/tokenmanager/SimpleTokenManager.java |  56 +-
 .../tokenmanager/impl/BasicTokenManager.java    |  94 ++
 .../tokenmanager/impl/BasicTokenManger.java     |  86 --
 .../apache/streams/util/schema/FieldType.java   |  13 +-
 .../apache/streams/util/schema/FieldUtil.java   |  49 +-
 .../apache/streams/util/schema/FileUtil.java    | 120 ++-
 .../streams/util/schema/GenerationConfig.java   | 134 +--
 .../org/apache/streams/util/schema/Schema.java  |  89 +-
 .../apache/streams/util/schema/SchemaStore.java |  30 +-
 .../streams/util/schema/SchemaStoreImpl.java    | 594 +++++++------
 .../apache/streams/util/schema/SchemaUtil.java  |  54 +-
 .../org/apache/streams/util/schema/URIUtil.java |  47 -
 .../org/apache/streams/util/schema/UriUtil.java |  56 ++
 .../requests/backoff/BackOffStrategyTest.java   |  93 +-
 .../ConstantTimeBackOffStrategyTest.java        |  30 +-
 .../backoff/ExponentialBackOffStrategyTest.java |  21 +-
 .../backoff/LinearTimeBackOffStartegyTest.java  |  40 -
 .../backoff/LinearTimeBackOffStrategyTest.java  |  41 +
 .../streams/util/files/StreamsScannerUtil.java  |  16 +-
 .../tokenmanager/TestBasicTokenManager.java     | 293 +++---
 .../util/schema/test/SchemaOrderingTest.java    | 252 +++---
 .../util/schema/test/SchemaStoreTest.java       |  95 +-
 .../verbs/ObjectCombinationGenericOrdering.java |  95 +-
 .../ObjectCombinationSpecificOrdering.java      | 109 ++-
 .../streams/verbs/VerbDefinitionMatchUtil.java  |  46 +-
 .../streams/verbs/VerbDefinitionResolver.java   | 168 ++--
 .../verbs/VerbDefinitionTemplateUtil.java       |  73 +-
 .../ObjectCombinationGenericOrderingTest.java   |  18 +-
 .../ObjectCombinationSpecificOrderingTest.java  |  31 +-
 .../verbs/VerbDefinitionResolverTest.java       | 151 ++--
 .../verbs/VerbDefinitionTemplateTest.java       |  97 +-
 .../streams/verbs/VerbDefinitionTest.java       |  73 +-
 474 files changed, 34219 insertions(+), 33006 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5e7ca2f..2af57be 100644
--- a/pom.xml
+++ b/pom.xml
@@ -183,6 +183,13 @@
     <build>
         <plugins>
             <plugin>
+                <artifactId>maven-checkstyle-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.scalastyle</groupId>
+                <artifactId>scalastyle-maven-plugin</artifactId>
+            </plugin>
+            <plugin>
                 <artifactId>maven-compiler-plugin</artifactId>
             </plugin>
             <plugin>
@@ -212,6 +219,60 @@
                         </filesets>
                     </configuration>
                 </plugin>
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-checkstyle-plugin</artifactId>
+                    <version>${checkstyle.plugin.version}</version>
+                    <dependencies>
+                        <dependency>
+                            <groupId>com.puppycrawl.tools</groupId>
+                            <artifactId>checkstyle</artifactId>
+                            <version>7.2</version>
+                        </dependency>
+                    </dependencies>
+                    <executions>
+                        <execution>
+                            <id>validate</id>
+                            <phase>validate</phase>
+                            <configuration>
+                                <configLocation>http://streams.incubator.apache.org/site/${project.version}/streams-master/streams-java-checkstyle.xml</configLocation>
+                                <encoding>UTF-8</encoding>
+                                <consoleOutput>true</consoleOutput>
+                                <includeTestSourceDirectory>true</includeTestSourceDirectory>
+                                <testSourceDirectory>${project.basedir}/src/test/java</testSourceDirectory>
+                                <failsOnError>false</failsOnError>
+                            </configuration>
+                            <goals>
+                                <goal>check</goal>
+                            </goals>
+                        </execution>
+                    </executions>
+                </plugin>
+                <plugin>
+                    <groupId>org.scalastyle</groupId>
+                    <artifactId>scalastyle-maven-plugin</artifactId>
+                    <version>${scalastyle.plugin.version}</version>
+                    <executions>
+                        <execution>
+                            <id>validate</id>
+                            <phase>validate</phase>
+                            <configuration>
+                                <verbose>false</verbose>
+                                <failOnViolation>false</failOnViolation>
+                                <includeTestSourceDirectory>true</includeTestSourceDirectory>
+                                <failOnWarning>false</failOnWarning>
+                                <sourceDirectory>${project.basedir}/src/main/scala</sourceDirectory>
+                                <testSourceDirectory>${project.basedir}/src/test/scala</testSourceDirectory>
+                                <!--<configLocation>https://raw.githubusercontent.com/databricks/sbt-databricks/master/scalastyle-config.xml</configLocation>-->
+                                <outputFile>${project.build.directory}/scalastyle-output.xml</outputFile>
+                                <outputEncoding>UTF-8</outputEncoding>
+                            </configuration>
+                            <goals>
+                                <goal>check</goal>
+                            </goals>
+                        </execution>
+                    </executions>
+                </plugin>
             </plugins>
         </pluginManagement>
     </build>
@@ -244,6 +305,11 @@
             <groupId>org.slf4j</groupId>
             <artifactId>slf4j-api</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.streams</groupId>
+            <artifactId>streams-master</artifactId>
+            <version>${project.version}</version>
+        </dependency>
     </dependencies>
 
     <reporting>

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityConverterProcessor.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityConverterProcessor.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityConverterProcessor.java
index 0e3e92d..79b5d56 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityConverterProcessor.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityConverterProcessor.java
@@ -19,11 +19,13 @@ under the License.
 
 package org.apache.streams.converter;
 
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.core.util.DatumUtils;
 import org.apache.streams.pojo.json.Activity;
+
+import com.google.common.collect.Lists;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -33,72 +35,74 @@ import java.util.List;
  * ActivityConverterProcessor is a utility processor for converting any datum document
  * to an Activity.
  *
+ * <p/>
  * By default it will handle string json and objectnode representation of existing Activities,
  * translating them into the POJO representation(s) preferred by each registered/detected
  * ActivityConverter.
  *
+ * <p/>
  * To use this capability without a dedicated stream processor, just use ActivityConverterUtil.
  */
 public class ActivityConverterProcessor implements StreamsProcessor {
 
-    public static final String STREAMS_ID = "ActivityConverterProcessor";
+  public static final String STREAMS_ID = "ActivityConverterProcessor";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ActivityConverterProcessor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(ActivityConverterProcessor.class);
 
-    private ActivityConverterUtil converterUtil;
+  private ActivityConverterUtil converterUtil;
 
-    private ActivityConverterProcessorConfiguration configuration;
+  private ActivityConverterProcessorConfiguration configuration;
 
-    public ActivityConverterProcessor() {
-    }
+  public ActivityConverterProcessor() {
+  }
 
-    public ActivityConverterProcessor(ActivityConverterProcessorConfiguration configuration) {
-        this.configuration = configuration;
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public ActivityConverterProcessor(ActivityConverterProcessorConfiguration configuration) {
+    this.configuration = configuration;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        List<StreamsDatum> result = Lists.newLinkedList();
-        Object document = entry.getDocument();
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
 
-        try {
+    List<StreamsDatum> result = Lists.newLinkedList();
+    Object document = entry.getDocument();
 
-            // first determine which classes this document might actually be
-            List<Activity> activityList = converterUtil.convert(document);
+    try {
 
-            for (Activity activity : activityList) {
-                StreamsDatum datum = DatumUtils.cloneDatum(entry);
-                datum.setId(activity.getId());
-                datum.setDocument(activity);
-                result.add(datum);
-            }
+      // first determine which classes this document might actually be
+      List<Activity> activityList = converterUtil.convert(document);
 
-        } catch( Exception e ) {
-            LOGGER.warn("General exception in process! " + e.getMessage());
-        } finally {
-            return result;
-        }
+      for (Activity activity : activityList) {
+        StreamsDatum datum = DatumUtils.cloneDatum(entry);
+        datum.setId(activity.getId());
+        datum.setDocument(activity);
+        result.add(datum);
+      }
 
+    } catch (Exception ex) {
+      LOGGER.warn("General exception in process! " + ex.getMessage());
+    } finally {
+      return result;
     }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        if( configurationObject instanceof ActivityConverterProcessorConfiguration)
-            converterUtil = ActivityConverterUtil.getInstance((ActivityConverterProcessorConfiguration)configurationObject);
-        else
-            converterUtil = ActivityConverterUtil.getInstance();
+  }
 
+  @Override
+  public void prepare(Object configurationObject) {
+    if (configurationObject instanceof ActivityConverterProcessorConfiguration) {
+      converterUtil = ActivityConverterUtil.getInstance((ActivityConverterProcessorConfiguration) configurationObject);
+    } else {
+      converterUtil = ActivityConverterUtil.getInstance();
     }
+  }
 
-    @Override
-    public void cleanUp() {
+  @Override
+  public void cleanUp() {
 
-    }
+  }
 
-};
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityConverterUtil.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityConverterUtil.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityConverterUtil.java
index b42eb71..1be527b 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityConverterUtil.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityConverterUtil.java
@@ -19,14 +19,14 @@ under the License.
 
 package org.apache.streams.converter;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import java.util.concurrent.ConcurrentHashMap;
 import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.data.DocumentClassifier;
 import org.apache.streams.data.util.ActivityUtil;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.pojo.json.Activity;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
 import org.reflections.Reflections;
 import org.reflections.scanners.SubTypesScanner;
 import org.reflections.util.ClasspathHelper;
@@ -41,204 +41,221 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
 
 /**
  * ActivityConverterUtil converts document into all possible Activity
  * representations based on registered DocumentClassifiers and ActivityConverters.
  *
+ * <p/>
  * Implementations and contributed modules may implement DocumentClassifiers
  * and ActivityConverters to translate additional document types into desired
  * Activity formats.
  *
+ * <p/>
  * A DocumentClassifier's reponsibility is to recognize document formats and label them,
  * using a jackson-compatible POJO class.
  *
+ * <p/>
  * An ActivityConverter's reponsibility is to converting a raw document associated with an
  * incoming POJO class into an activity.
  *
  */
 public class ActivityConverterUtil {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ActivityConverterUtil.class);
-
-    private static final ActivityConverterUtil INSTANCE = new ActivityConverterUtil();
-
-    public static ActivityConverterUtil getInstance(){
-        return INSTANCE;
-    }
-
-    public static ActivityConverterUtil getInstance(ActivityConverterProcessorConfiguration configuration){
-        return new ActivityConverterUtil(configuration);
+  private static final Logger LOGGER = LoggerFactory.getLogger(ActivityConverterUtil.class);
+
+  private static final ActivityConverterUtil INSTANCE = new ActivityConverterUtil();
+
+  public static ActivityConverterUtil getInstance() {
+    return INSTANCE;
+  }
+
+  public static ActivityConverterUtil getInstance(ActivityConverterProcessorConfiguration configuration) {
+    return new ActivityConverterUtil(configuration);
+  }
+
+  private List<DocumentClassifier> classifiers = new LinkedList<>();
+  private List<ActivityConverter> converters = new LinkedList<>();
+
+  /*
+    Use getInstance to get a globally shared thread-safe ActivityConverterUtil,
+    rather than call this constructor.  Reflection-based resolution of
+    converters across all modules can be slow and should only happen
+    once per JVM.
+   */
+  protected ActivityConverterUtil() {
+    configure();
+  }
+
+  protected ActivityConverterUtil(ActivityConverterProcessorConfiguration configuration) {
+    classifiers = configuration.getClassifiers();
+    converters = configuration.getConverters();
+    configure();
+  }
+
+  /**
+   * convert document to activity.
+   *
+   * @param document document to convert
+   * @return result
+   */
+  public List<Activity> convert(Object document) {
+
+    List<Activity> result = new ArrayList<>();
+
+    List<Class> detectedClasses = detectClasses(document);
+
+    if ( detectedClasses.size() == 0 ) {
+      LOGGER.warn("Unable to classify");
+      return null;
+    } else {
+      LOGGER.debug("Classified document as " + detectedClasses);
     }
 
-    private List<DocumentClassifier> classifiers = new LinkedList<>();
-    private List<ActivityConverter> converters = new LinkedList<>();
-
-    /*
-      Use getInstance to get a globally shared thread-safe ActivityConverterUtil,
-      rather than call this constructor.  Reflection-based resolution of
-      converters across all modules can be slow and should only happen
-      once per JVM.
-     */
-    protected ActivityConverterUtil() {
-        configure();
-    }
+    // for each of these classes:
+    //   use TypeUtil to switch the document to that type
+    Map<Class, Object> typedDocs = convertToDetectedClasses(detectedClasses, document);
 
-    protected ActivityConverterUtil(ActivityConverterProcessorConfiguration configuration) {
-        classifiers = configuration.getClassifiers();
-        converters = configuration.getConverters();
-        configure();
+    if ( typedDocs.size() == 0 ) {
+      LOGGER.warn("Unable to convert to any detected Class");
+      return null;
+    } else {
+      LOGGER.debug("Document has " + typedDocs.size() + " representations: " + typedDocs.toString());
     }
-    public List<Activity> convert(Object document) {
 
-        List<Activity> result = new ArrayList<>();
+    // for each specified / discovered converter
+    for ( ActivityConverter converter : converters ) {
 
-        List<Class> detectedClasses = detectClasses(document);
+      Object typedDoc = typedDocs.get(converter.requiredClass());
 
-        if( detectedClasses.size() == 0 ) {
-            LOGGER.warn("Unable to classify");
-            return null;
-        } else {
-            LOGGER.debug("Classified document as " + detectedClasses);
-        }
-
-        // for each of these classes:
-        //   use TypeUtil to switch the document to that type
-        Map<Class, Object> typedDocs = convertToDetectedClasses(detectedClasses, document);
+      List<Activity> activities = applyConverter(converter, typedDoc);
 
-        if( typedDocs.size() == 0 ) {
-            LOGGER.warn("Unable to convert to any detected Class");
-            return null;
-        }
-        else {
-            LOGGER.debug("Document has " + typedDocs.size() + " representations: " + typedDocs.toString());
-        }
+      result.addAll(activities);
+    }
 
-        // for each specified / discovered converter
-        for( ActivityConverter converter : converters ) {
+    return result;
+  }
 
-            Object typedDoc = typedDocs.get(converter.requiredClass());
+  protected List<Activity> applyConverter(ActivityConverter converter, Object typedDoc) {
 
-            List<Activity> activities = applyConverter(converter, typedDoc);
+    List<Activity> activities = new ArrayList<>();
+    // if the document can be typed as the required class
+    if ( typedDoc != null ) {
 
-            result.addAll(activities);
-        }
+      // let the converter create activities if it can
+      try {
+        activities = convertToActivity(converter, typedDoc);
+      } catch ( Exception ex ) {
+        LOGGER.debug("convertToActivity caught exception " + ex.getMessage());
+      }
 
-        return result;
     }
+    return activities;
+  }
 
-    protected List<Activity> applyConverter(ActivityConverter converter, Object typedDoc) {
-
-        List<Activity> activities = new ArrayList<>();
-        // if the document can be typed as the required class
-        if( typedDoc != null ) {
-
-            // let the converter create activities if it can
-            try {
-                activities = convertToActivity(converter, typedDoc);
-            } catch( Exception e ) {
-                LOGGER.debug("convertToActivity caught exception " + e.getMessage());
-            }
+  protected List<Activity> convertToActivity(ActivityConverter converter, Object document) {
 
-        }
-        return activities;
+    List<Activity> activities = new ArrayList<>();
+    try {
+      activities = converter.toActivityList(document);
+    } catch (ActivityConversionException e1) {
+      LOGGER.debug(converter.getClass().getCanonicalName() + " unable to convert " + converter.requiredClass().getClass().getCanonicalName() + " to Activity");
     }
 
-    protected List<Activity> convertToActivity(ActivityConverter converter, Object document) {
+    for (Activity activity : activities) {
 
-        List<Activity> activities = new ArrayList<>();
-        try {
-            activities = converter.toActivityList(document);
-        } catch (ActivityConversionException e1) {
-            LOGGER.debug(converter.getClass().getCanonicalName() + " unable to convert " + converter.requiredClass().getClass().getCanonicalName() + " to Activity");
-        }
+      if (activity != null) {
 
-        for (Activity activity : activities) {
+        // only accept valid activities
+        //   this primitive validity check should be replaced with
+        //   one that applies javax.validation to JSR303 annotations
+        //   on the Activity json schema once a suitable implementation
+        //   is found.
+        if (!ActivityUtil.isValid(activity)) {
+          activities.remove(activity);
+          LOGGER.debug(converter.getClass().getCanonicalName() + " produced invalid Activity converting " + converter.requiredClass().getClass().getCanonicalName());
+        }
 
-            if (activity != null) {
+      } else {
+        LOGGER.debug(converter.getClass().getCanonicalName() + " returned null converting " + converter.requiredClass().getClass().getCanonicalName() + " to Activity");
+      }
 
-                // only accept valid activities
-                //   this primitive validity check should be replaced with
-                //   one that applies javax.validation to JSR303 annotations
-                //   on the Activity json schema once a suitable implementation
-                //   is found.
-                if (!ActivityUtil.isValid(activity)) {
-                    activities.remove(activity);
-                    LOGGER.debug(converter.getClass().getCanonicalName() + " produced invalid Activity converting " + converter.requiredClass().getClass().getCanonicalName());
-                }
+    }
+    return activities;
 
-            } else {
-                LOGGER.debug(converter.getClass().getCanonicalName() + " returned null converting " + converter.requiredClass().getClass().getCanonicalName() + " to Activity");
-            }
+  }
 
-        }
-        return activities;
+  protected List<Class> detectClasses(Object document) {
 
-    }
+    // ConcurrentHashSet is preferable, but it's only in guava 15+
+    // spark 1.5.0 uses guava 14 so for the moment this is the workaround
+    // Set<Class> detectedClasses = new ConcurrentHashSet();
+    Set<Class> detectedClasses = Collections.newSetFromMap(new ConcurrentHashMap<Class, Boolean>());
 
-    protected List<Class> detectClasses(Object document) {
-
-        // ConcurrentHashSet is preferable, but it's only in guava 15+
-		// spark 1.5.0 uses guava 14 so for the moment this is the workaround
-		// Set<Class> detectedClasses = new ConcurrentHashSet();
-		Set<Class> detectedClasses = Collections.newSetFromMap(new ConcurrentHashMap<Class, Boolean>());
-
-        for( DocumentClassifier classifier : classifiers ) {
-            try {
-                List<Class> detected = classifier.detectClasses(document);
-                if (detected != null && detected.size() > 0)
-                    detectedClasses.addAll(detected);
-            } catch( Exception e) {
-                LOGGER.warn("{} failed in method detectClasses - ()", classifier.getClass().getCanonicalName(), e);
-            }
+    for ( DocumentClassifier classifier : classifiers ) {
+      try {
+        List<Class> detected = classifier.detectClasses(document);
+        if (detected != null && detected.size() > 0) {
+          detectedClasses.addAll(detected);
         }
-
-        return Lists.newArrayList(detectedClasses);
+      } catch ( Exception ex ) {
+        LOGGER.warn("{} failed in method detectClasses - ()", classifier.getClass().getCanonicalName(), ex);
+      }
     }
 
-    private Map<Class, Object> convertToDetectedClasses(List<Class> datumClasses, Object document) {
+    return Lists.newArrayList(detectedClasses);
+  }
 
-        Map<Class, Object> convertedDocuments = new HashMap<>();
-        for( Class detectedClass : datumClasses ) {
+  private Map<Class, Object> convertToDetectedClasses(List<Class> datumClasses, Object document) {
 
-            Object typedDoc;
-            if (detectedClass.isInstance(document))
-                typedDoc = document;
-            else
-                typedDoc = TypeConverterUtil.getInstance().convert(document, detectedClass);
+    Map<Class, Object> convertedDocuments = new HashMap<>();
+    for ( Class detectedClass : datumClasses ) {
 
-            if( typedDoc != null )
-                convertedDocuments.put(detectedClass, typedDoc);
-        }
+      Object typedDoc;
+      if (detectedClass.isInstance(document)) {
+        typedDoc = document;
+      } else {
+        typedDoc = TypeConverterUtil.getInstance().convert(document, detectedClass);
+      }
 
-        return convertedDocuments;
+      if ( typedDoc != null ) {
+        convertedDocuments.put(detectedClass, typedDoc);
+      }
     }
 
-    public void configure() {
-        Reflections reflections = new Reflections(new ConfigurationBuilder()
-                .setUrls(ClasspathHelper.forPackage("org.apache.streams"))
-                .setScanners(new SubTypesScanner()));
-        if ( classifiers.size() == 0) {
-            Set<Class<? extends DocumentClassifier>> classifierClasses = reflections.getSubTypesOf(DocumentClassifier.class);
-            for (Class classifierClass : classifierClasses) {
-                try {
-                    this.classifiers.add((DocumentClassifier) classifierClass.newInstance());
-                } catch (Exception e) {
-                    LOGGER.warn("Exception instantiating " + classifierClass);
-                }
-            }
+    return convertedDocuments;
+  }
+
+  /**
+   * configure ActivityConverterUtil.
+   */
+  public void configure() {
+    Reflections reflections = new Reflections(new ConfigurationBuilder()
+        .setUrls(ClasspathHelper.forPackage("org.apache.streams"))
+        .setScanners(new SubTypesScanner()));
+    if ( classifiers.size() == 0) {
+      Set<Class<? extends DocumentClassifier>> classifierClasses = reflections.getSubTypesOf(DocumentClassifier.class);
+      for (Class classifierClass : classifierClasses) {
+        try {
+          this.classifiers.add((DocumentClassifier) classifierClass.newInstance());
+        } catch (Exception ex) {
+          LOGGER.warn("Exception instantiating " + classifierClass);
         }
-        Preconditions.checkArgument(classifiers.size() > 0);
-        if ( converters.size() == 0) {
-            Set<Class<? extends ActivityConverter>> converterClasses = reflections.getSubTypesOf(ActivityConverter.class);
-            for (Class converterClass : converterClasses) {
-                try {
-                    this.converters.add((ActivityConverter) converterClass.newInstance());
-                } catch (Exception e) {
-                    LOGGER.warn("Exception instantiating " + converterClass);
-                }
-            }
+      }
+    }
+    Preconditions.checkArgument(classifiers.size() > 0);
+    if ( converters.size() == 0) {
+      Set<Class<? extends ActivityConverter>> converterClasses = reflections.getSubTypesOf(ActivityConverter.class);
+      for (Class converterClass : converterClasses) {
+        try {
+          this.converters.add((ActivityConverter) converterClass.newInstance());
+        } catch (Exception ex) {
+          LOGGER.warn("Exception instantiating " + converterClass);
         }
-        Preconditions.checkArgument(this.converters.size() > 0);
+      }
     }
+    Preconditions.checkArgument(this.converters.size() > 0);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityObjectConverterProcessor.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityObjectConverterProcessor.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityObjectConverterProcessor.java
index ff45877..d77cbec 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityObjectConverterProcessor.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityObjectConverterProcessor.java
@@ -19,12 +19,13 @@ under the License.
 
 package org.apache.streams.converter;
 
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.core.util.DatumUtils;
-import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
+
+import com.google.common.collect.Lists;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -34,70 +35,72 @@ import java.util.List;
  * ActivityObjectConverterProcessor is a utility processor for converting any datum document
  * to an Activity.
  *
+ * <p/>
  * By default it will handle string json and objectnode representation of existing Activities,
  * translating them into the POJO representation(s) preferred by each registered/detected
  * ActivityConverter.
  *
+ * <p/>
  * To use this capability without a dedicated stream processor, just use ActivityConverterUtil.
  */
 public class ActivityObjectConverterProcessor implements StreamsProcessor {
 
-    public static final String STREAMS_ID = "ActivityObjectConverterProcessor";
+  public static final String STREAMS_ID = "ActivityObjectConverterProcessor";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ActivityObjectConverterProcessor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(ActivityObjectConverterProcessor.class);
 
-    private ActivityObjectConverterUtil converterUtil;
+  private ActivityObjectConverterUtil converterUtil;
 
-    private ActivityObjectConverterProcessorConfiguration configuration;
+  private ActivityObjectConverterProcessorConfiguration configuration;
 
-    public ActivityObjectConverterProcessor() {
-    }
+  public ActivityObjectConverterProcessor() {
+  }
 
-    public ActivityObjectConverterProcessor(ActivityObjectConverterProcessorConfiguration configuration) {
-        this.configuration = configuration;
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public ActivityObjectConverterProcessor(ActivityObjectConverterProcessorConfiguration configuration) {
+    this.configuration = configuration;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        List<StreamsDatum> result = Lists.newLinkedList();
-        Object document = entry.getDocument();
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
 
-        try {
+    List<StreamsDatum> result = Lists.newLinkedList();
+    Object document = entry.getDocument();
 
-            // first determine which classes this document might actually be
-            ActivityObject activityObject = converterUtil.convert(document);
+    try {
 
-            StreamsDatum datum = DatumUtils.cloneDatum(entry);
-            datum.setId(activityObject.getId());
-            datum.setDocument(activityObject);
-            result.add(datum);
+      // first determine which classes this document might actually be
+      ActivityObject activityObject = converterUtil.convert(document);
 
-        } catch( Exception e ) {
-            LOGGER.warn("General exception in process! " + e.getMessage());
-        } finally {
-            return result;
-        }
+      StreamsDatum datum = DatumUtils.cloneDatum(entry);
+      datum.setId(activityObject.getId());
+      datum.setDocument(activityObject);
+      result.add(datum);
 
+    } catch ( Exception ex ) {
+      LOGGER.warn("General exception in process! " + ex.getMessage());
+    } finally {
+      return result;
     }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        if( configurationObject instanceof ActivityObjectConverterProcessorConfiguration)
-            converterUtil = ActivityObjectConverterUtil.getInstance((ActivityObjectConverterProcessorConfiguration)configurationObject);
-        else
-            converterUtil = ActivityObjectConverterUtil.getInstance();
+  }
 
+  @Override
+  public void prepare(Object configurationObject) {
+    if( configurationObject instanceof ActivityObjectConverterProcessorConfiguration) {
+      converterUtil = ActivityObjectConverterUtil.getInstance((ActivityObjectConverterProcessorConfiguration) configurationObject);
+    } else {
+      converterUtil = ActivityObjectConverterUtil.getInstance();
     }
+  }
 
-    @Override
-    public void cleanUp() {
+  @Override
+  public void cleanUp() {
 
-    }
+  }
 
-};
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityObjectConverterUtil.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityObjectConverterUtil.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityObjectConverterUtil.java
index 8085780..408384d 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityObjectConverterUtil.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/ActivityObjectConverterUtil.java
@@ -19,15 +19,15 @@ under the License.
 
 package org.apache.streams.converter;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.streams.data.ActivityObjectConverter;
 import org.apache.streams.data.DocumentClassifier;
-import org.apache.streams.data.util.ActivityUtil;
 import org.apache.streams.exceptions.ActivityConversionException;
-import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 import org.reflections.Reflections;
 import org.reflections.scanners.SubTypesScanner;
 import org.reflections.util.ClasspathHelper;
@@ -46,210 +46,229 @@ import java.util.concurrent.ConcurrentHashMap;
  * ActivityObjectConverterUtil converts document into all possible ActivityObject
  * representations based on registered DocumentClassifiers and ActivityObjectConverters.
  *
+ * <p/>
  * Implementations and contributed modules may implement DocumentClassifiers
  * and ActivityObjectConverters to translate additional document types into desired
  * ActivityObject formats.
  *
+ * <p/>
+ * A DocumentClassifier's responsibility is to recognize document formats and label them,
  * using a jackson-compatible POJO class.
  *
+ * <p/>
+ * An ActivityObjectConverter's responsibility is to convert a raw document associated with an
  * incoming POJO class into an activity object.
  *
  */
 public class ActivityObjectConverterUtil {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ActivityObjectConverterUtil.class);
-
-    private static final ActivityObjectConverterUtil INSTANCE = new ActivityObjectConverterUtil();
-
-    public static ActivityObjectConverterUtil getInstance() {
-        return INSTANCE;
-    }
-
-    public static ActivityObjectConverterUtil getInstance(ActivityObjectConverterProcessorConfiguration configuration) {
-        return new ActivityObjectConverterUtil(configuration);
+  private static final Logger LOGGER = LoggerFactory.getLogger(ActivityObjectConverterUtil.class);
+
+  private static final ActivityObjectConverterUtil INSTANCE = new ActivityObjectConverterUtil();
+
+  public static ActivityObjectConverterUtil getInstance() {
+    return INSTANCE;
+  }
+
+  public static ActivityObjectConverterUtil getInstance(ActivityObjectConverterProcessorConfiguration configuration) {
+    return new ActivityObjectConverterUtil(configuration);
+  }
+
+  private List<DocumentClassifier> classifiers = Lists.newLinkedList();
+  private List<ActivityObjectConverter> converters = Lists.newLinkedList();
+
+  /*
+    Use getInstance to get a globally shared thread-safe ActivityConverterUtil,
+    rather than call this constructor.  Reflection-based resolution of
+    converters across all modules can be slow and should only happen
+    once per JVM.
+   */
+  protected ActivityObjectConverterUtil() {
+    configure();
+  }
+
+  protected ActivityObjectConverterUtil(ActivityObjectConverterProcessorConfiguration configuration) {
+    classifiers = configuration.getClassifiers();
+    converters = configuration.getConverters();
+    configure();
+  }
+
+  /**
+   * convert document to ActivityObject.
+   *
+   * @param document document to convert
+   * @return result
+   */
+  public synchronized ActivityObject convert(Object document) {
+
+    List<Class> detectedClasses = detectClasses(document);
+
+    if (detectedClasses.size() == 0) {
+      LOGGER.warn("Unable to classify");
+      return null;
+    } else {
+      LOGGER.debug("Classified document as " + detectedClasses);
     }
 
-    private List<DocumentClassifier> classifiers = Lists.newLinkedList();
-    private List<ActivityObjectConverter> converters = Lists.newLinkedList();
-
-    /*
-      Use getInstance to get a globally shared thread-safe ActivityConverterUtil,
-      rather than call this constructor.  Reflection-based resolution of
-      converters across all modules can be slow and should only happen
-      once per JVM.
-     */
-    protected ActivityObjectConverterUtil() {
-        configure();
-    }
+    // for each of these classes:
+    //   use TypeUtil to switch the document to that type
+    Map<Class, Object> typedDocs = convertToDetectedClasses(detectedClasses, document);
 
-    protected ActivityObjectConverterUtil(ActivityObjectConverterProcessorConfiguration configuration) {
-        classifiers = configuration.getClassifiers();
-        converters = configuration.getConverters();
-        configure();
+    if (typedDocs.size() == 0) {
+      LOGGER.warn("Unable to convert to any detected Class");
+      return null;
+    } else {
+      LOGGER.debug("Document has " + typedDocs.size() + " representations: " + typedDocs.toString());
     }
 
-    public synchronized ActivityObject convert(Object document) {
+    Map<Class, ActivityObject> convertedDocs = new HashMap<>();
 
-        List<Class> detectedClasses = detectClasses(document);
+    // for each specified / discovered converter
+    for (ActivityObjectConverter converter : converters) {
 
-        if (detectedClasses.size() == 0) {
-            LOGGER.warn("Unable to classify");
-            return null;
-        } else {
-            LOGGER.debug("Classified document as " + detectedClasses);
-        }
+      Class requiredClass = converter.requiredClass();
 
-        // for each of these classes:
-        //   use TypeUtil to switch the document to that type
-        Map<Class, Object> typedDocs = convertToDetectedClasses(detectedClasses, document);
+      Object typedDoc = typedDocs.get(requiredClass);
 
-        if (typedDocs.size() == 0) {
-            LOGGER.warn("Unable to convert to any detected Class");
-            return null;
-        } else {
-            LOGGER.debug("Document has " + typedDocs.size() + " representations: " + typedDocs.toString());
-        }
+      ActivityObject activityObject = applyConverter(converter, typedDoc);
 
-        Map<Class, ActivityObject> convertedDocs = new HashMap<>();
+      convertedDocs.put(requiredClass, activityObject);
+    }
 
-        // for each specified / discovered converter
-        for (ActivityObjectConverter converter : converters) {
+    ActivityObject result = deepestDescendant(convertedDocs);
 
-            Class requiredClass = converter.requiredClass();
+    return result;
+  }
 
-            Object typedDoc = typedDocs.get(requiredClass);
+  protected ActivityObject applyConverter(ActivityObjectConverter converter, Object typedDoc) {
 
-            ActivityObject activityObject = applyConverter(converter, typedDoc);
+    ActivityObject activityObject = null;
+    // if the document can be typed as the required class
+    if (typedDoc != null) {
 
-            convertedDocs.put(requiredClass, activityObject);
-        }
+      // let the converter create activities if it can
+      try {
+        activityObject = convertToActivityObject(converter, typedDoc);
+      } catch (Exception ex) {
+        LOGGER.debug("convertToActivity caught exception " + ex.getMessage());
+      }
 
-        ActivityObject result = deepestDescendant(convertedDocs);
-
-        return result;
     }
+    return activityObject;
+  }
 
-    protected ActivityObject applyConverter(ActivityObjectConverter converter, Object typedDoc) {
-
-        ActivityObject activityObject = null;
-        // if the document can be typed as the required class
-        if (typedDoc != null) {
+  protected ActivityObject convertToActivityObject(ActivityObjectConverter converter, Object document) {
 
-            // let the converter create activities if it can
-            try {
-                activityObject = convertToActivityObject(converter, typedDoc);
-            } catch (Exception e) {
-                LOGGER.debug("convertToActivity caught exception " + e.getMessage());
-            }
-
-        }
-        return activityObject;
+    ActivityObject activityObject = null;
+    try {
+      activityObject = converter.toActivityObject(document);
+    } catch (ActivityConversionException e1) {
+      LOGGER.debug(converter.getClass().getCanonicalName() + " unable to convert " + converter.requiredClass().getClass().getCanonicalName() + " to Activity");
     }
 
-    protected ActivityObject convertToActivityObject(ActivityObjectConverter converter, Object document) {
+    return activityObject;
 
-        ActivityObject activityObject = null;
-        try {
-            activityObject = converter.toActivityObject(document);
-        } catch (ActivityConversionException e1) {
-            LOGGER.debug(converter.getClass().getCanonicalName() + " unable to convert " + converter.requiredClass().getClass().getCanonicalName() + " to Activity");
-        }
+  }
 
-        return activityObject;
-
-    }
+  protected List<Class> detectClasses(Object document) {
 
-    protected List<Class> detectClasses(Object document) {
-
-        // ConcurrentHashSet is preferable, but it's only in guava 15+
-        // spark 1.5.0 uses guava 14 so for the moment this is the workaround
-        // Set<Class> detectedClasses = new ConcurrentHashSet();
-        Set<Class> detectedClasses = Collections.newSetFromMap(new ConcurrentHashMap<Class, Boolean>());
-        for (DocumentClassifier classifier : classifiers) {
-            try {
-                List<Class> detected = classifier.detectClasses(document);
-                if (detected != null && detected.size() > 0)
-                    detectedClasses.addAll(detected);
-            } catch (Exception e) {
-                LOGGER.warn("{} failed in method detectClasses - ()", classifier.getClass().getCanonicalName(), e);
-            }
+    // ConcurrentHashSet is preferable, but it's only in guava 15+
+    // spark 1.5.0 uses guava 14 so for the moment this is the workaround
+    // Set<Class> detectedClasses = new ConcurrentHashSet();
+    Set<Class> detectedClasses = Collections.newSetFromMap(new ConcurrentHashMap<Class, Boolean>());
+    for (DocumentClassifier classifier : classifiers) {
+      try {
+        List<Class> detected = classifier.detectClasses(document);
+        if (detected != null && detected.size() > 0) {
+          detectedClasses.addAll(detected);
         }
-
-        return Lists.newArrayList(detectedClasses);
+      } catch (Exception ex) {
+        LOGGER.warn("{} failed in method detectClasses - ()", classifier.getClass().getCanonicalName(), ex);
+      }
     }
 
-    private Map<Class, Object> convertToDetectedClasses(List<Class> datumClasses, Object document) {
+    return Lists.newArrayList(detectedClasses);
+  }
 
-        Map<Class, Object> convertedDocuments = Maps.newHashMap();
-        for (Class detectedClass : datumClasses) {
+  private Map<Class, Object> convertToDetectedClasses(List<Class> datumClasses, Object document) {
 
-            Object typedDoc;
-            if (detectedClass.isInstance(document))
-                typedDoc = document;
-            else
-                typedDoc = TypeConverterUtil.getInstance().convert(document, detectedClass);
+    Map<Class, Object> convertedDocuments = Maps.newHashMap();
+    for (Class detectedClass : datumClasses) {
 
-            if (typedDoc != null)
-                convertedDocuments.put(detectedClass, typedDoc);
-        }
+      Object typedDoc;
+      if (detectedClass.isInstance(document)) {
+        typedDoc = document;
+      } else {
+        typedDoc = TypeConverterUtil.getInstance().convert(document, detectedClass);
+      }
 
-        return convertedDocuments;
+      if (typedDoc != null) {
+        convertedDocuments.put(detectedClass, typedDoc);
+      }
     }
 
-    public void configure() {
-        Reflections reflections = new Reflections(new ConfigurationBuilder()
-                .setUrls(ClasspathHelper.forPackage("org.apache.streams"))
-                .setScanners(new SubTypesScanner()));
-        if (classifiers.size() == 0) {
-            Set<Class<? extends DocumentClassifier>> classifierClasses = reflections.getSubTypesOf(DocumentClassifier.class);
-            for (Class classifierClass : classifierClasses) {
-                try {
-                    this.classifiers.add((DocumentClassifier) classifierClass.newInstance());
-                } catch (Exception e) {
-                    LOGGER.warn("Exception instantiating " + classifierClass);
-                }
-            }
+    return convertedDocuments;
+  }
+
+  /**
+   * configure ActivityObjectConverterUtil.
+   */
+  public void configure() {
+    Reflections reflections = new Reflections(new ConfigurationBuilder()
+        .setUrls(ClasspathHelper.forPackage("org.apache.streams"))
+        .setScanners(new SubTypesScanner()));
+    if (classifiers.size() == 0) {
+      Set<Class<? extends DocumentClassifier>> classifierClasses = reflections.getSubTypesOf(DocumentClassifier.class);
+      for (Class classifierClass : classifierClasses) {
+        try {
+          this.classifiers.add((DocumentClassifier) classifierClass.newInstance());
+        } catch (Exception ex) {
+          LOGGER.warn("Exception instantiating " + classifierClass);
         }
-        Preconditions.checkArgument(classifiers.size() > 0);
-        if (converters.size() == 0) {
-            Set<Class<? extends ActivityObjectConverter>> converterClasses = reflections.getSubTypesOf(ActivityObjectConverter.class);
-            for (Class converterClass : converterClasses) {
-                try {
-                    this.converters.add((ActivityObjectConverter) converterClass.newInstance());
-                } catch (Exception e) {
-                    LOGGER.warn("Exception instantiating " + converterClass);
-                }
-            }
+      }
+    }
+    Preconditions.checkArgument(classifiers.size() > 0);
+    if (converters.size() == 0) {
+      Set<Class<? extends ActivityObjectConverter>> converterClasses = reflections.getSubTypesOf(ActivityObjectConverter.class);
+      for (Class converterClass : converterClasses) {
+        try {
+          this.converters.add((ActivityObjectConverter) converterClass.newInstance());
+        } catch (Exception ex) {
+          LOGGER.warn("Exception instantiating " + converterClass);
         }
-        Preconditions.checkArgument(this.converters.size() > 0);
+      }
     }
+    Preconditions.checkArgument(this.converters.size() > 0);
+  }
 
-    private boolean isAncestor(Class possibleDescendant, Class possibleAncestor) {
-        if (possibleDescendant.equals(Object.class))
-            return false;
-        if (possibleDescendant.getSuperclass().equals(possibleAncestor))
-            return true;
-        else return isAncestor(possibleDescendant.getSuperclass(), possibleAncestor);
+  private boolean isAncestor(Class possibleDescendant, Class possibleAncestor) {
+    if (possibleDescendant.equals(Object.class)) {
+      return false;
+    }
+    if (possibleDescendant.getSuperclass().equals(possibleAncestor)) {
+      return true;
+    } else {
+      return isAncestor(possibleDescendant.getSuperclass(), possibleAncestor);
     }
+  }
 
-    // prefer the most specific ActivityObject sub-class returned by all converters
-    private ActivityObject deepestDescendant(Map<Class, ActivityObject> map) {
+  // prefer the most specific ActivityObject sub-class returned by all converters
+  private ActivityObject deepestDescendant(Map<Class, ActivityObject> map) {
 
-        ActivityObject result = null;
+    ActivityObject result = null;
 
-        for( Map.Entry<Class, ActivityObject> entry : map.entrySet()) {
-            if( entry.getKey() != null ) {
-                if (result == null)
-                    result = entry.getValue();
-                else if (isAncestor(entry.getKey(), result.getClass()))
-                    result = entry.getValue();
-            }
+    for ( Map.Entry<Class, ActivityObject> entry : map.entrySet()) {
+      if ( entry.getKey() != null ) {
+        if (result == null) {
+          result = entry.getValue();
+        } else if (isAncestor(entry.getKey(), result.getClass())) {
+          result = entry.getValue();
         }
-
-        return result;
+      }
     }
 
+    return result;
+  }
+
 
 }



[32/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexUtils.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexUtils.java b/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexUtils.java
index 6b4f28a..d1936d1 100644
--- a/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexUtils.java
+++ b/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexUtils.java
@@ -21,6 +21,7 @@ package org.apache.streams.regex;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,73 +31,73 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 /**
- * Provides utilities for extracting matches from content
+ * Provides utilities for extracting matches from content.
  */
 public class RegexUtils {
 
-    private static final Map<String, Pattern> patternCache = Maps.newConcurrentMap();
-    private final static Logger LOGGER = LoggerFactory.getLogger(RegexUtils.class);
+  private static final Map<String, Pattern> patternCache = Maps.newConcurrentMap();
+  private static final Logger LOGGER = LoggerFactory.getLogger(RegexUtils.class);
 
-    private RegexUtils() {}
+  private RegexUtils() {}
 
-    /**
-     * Extracts matches of the given pattern in the content and returns them as a list.
-     * @param pattern the pattern for the substring to match.  For example, [0-9]* matches 911 in Emergency number is 911.
-     * @param content the complete content to find matches in.
-     * @return a non-null list of matches.
-     */
-    public static Map<String, List<Integer>> extractMatches(String pattern, String content) {
-        return getMatches(pattern, content, -1);
-    }
+  /**
+   * Extracts matches of the given pattern in the content and returns them as a list.
+   * @param pattern the pattern for the substring to match.  For example, [0-9]* matches 911 in Emergency number is 911.
+   * @param content the complete content to find matches in.
+   * @return a non-null list of matches.
+   */
+  public static Map<String, List<Integer>> extractMatches(String pattern, String content) {
+    return getMatches(pattern, content, -1);
+  }
 
-    /**
-     * Extracts matches of the given pattern that are bounded by separation characters and returns them as a list.
-     * @param pattern the pattern for the substring to match.  For example, [0-9]* matches 911 in Emergency number is 911.
-     * @param content the complete content to find matches in.
-     * @return a non-null list of matches.
-     */
-    public static Map<String, List<Integer>> extractWordMatches(String pattern, String content) {
-        pattern = "(^|\\s)(" + pattern + ")([\\s!\\.;,?]|$)";
-        return getMatches(pattern, content, 2);
-    }
+  /**
+   * Extracts matches of the given pattern that are bounded by separation characters and returns them as a list.
+   * @param pattern the pattern for the substring to match.  For example, [0-9]* matches 911 in Emergency number is 911.
+   * @param content the complete content to find matches in.
+   * @return a non-null list of matches.
+   */
+  public static Map<String, List<Integer>> extractWordMatches(String pattern, String content) {
+    pattern = "(^|\\s)(" + pattern + ")([\\s!\\.;,?]|$)";
+    return getMatches(pattern, content, 2);
+  }
 
-    protected static Map<String, List<Integer>> getMatches(String pattern, String content, int capture) {
-        try {
-            Map<String, List<Integer>> matches = Maps.newHashMap();
-            if(content == null) {
-                return matches;
-            }
+  protected static Map<String, List<Integer>> getMatches(String pattern, String content, int capture) {
+    try {
+      Map<String, List<Integer>> matches = Maps.newHashMap();
+      if (content == null) {
+        return matches;
+      }
 
-            Matcher m = getPattern(pattern).matcher(content);
-            while (m.find()) {
-                String group = capture > 0 ? m.group(capture) : m.group();
-                if (group != null && !group.equals("")) {
-                    List<Integer> indices;
-                    if (matches.containsKey(group)) {
-                        indices = matches.get(group);
-                    } else {
-                        indices = Lists.newArrayList();
-                        matches.put(group, indices);
-                    }
-                    indices.add(m.start());
-                }
-            }
-            return matches;
-        } catch (Throwable e) {
-            LOGGER.error("Throwable process {}", e);
-            e.printStackTrace();
-            throw new RuntimeException(e);
+      Matcher matcher = getPattern(pattern).matcher(content);
+      while (matcher.find()) {
+        String group = capture > 0 ? matcher.group(capture) : matcher.group();
+        if (group != null && !group.equals("")) {
+          List<Integer> indices;
+          if (matches.containsKey(group)) {
+            indices = matches.get(group);
+          } else {
+            indices = Lists.newArrayList();
+            matches.put(group, indices);
+          }
+          indices.add(matcher.start());
         }
+      }
+      return matches;
+    } catch (Throwable ex) {
+      LOGGER.error("Throwable process {}", ex);
+      ex.printStackTrace();
+      throw new RuntimeException(ex);
     }
+  }
 
-    private static Pattern getPattern(String pattern) {
-        Pattern p;
-        if (patternCache.containsKey(pattern)) {
-            p = patternCache.get(pattern);
-        } else {
-            p = Pattern.compile(pattern);
-            patternCache.put(pattern, p);
-        }
-        return p;
+  private static Pattern getPattern(String patternString) {
+    Pattern pattern;
+    if (patternCache.containsKey(patternString)) {
+      pattern = patternCache.get(patternString);
+    } else {
+      pattern = Pattern.compile(patternString);
+      patternCache.put(patternString, pattern);
     }
+    return pattern;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexHashtagExtractorTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexHashtagExtractorTest.java b/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexHashtagExtractorTest.java
index 2de4aa8..6e17de8 100644
--- a/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexHashtagExtractorTest.java
+++ b/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexHashtagExtractorTest.java
@@ -19,7 +19,6 @@
 
 package org.apache.streams.regex;
 
-
 import com.google.common.collect.Sets;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.pojo.extensions.ExtensionUtil;

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexMentionExtractorTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexMentionExtractorTest.java b/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexMentionExtractorTest.java
index c7778a8..66f7aa5 100644
--- a/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexMentionExtractorTest.java
+++ b/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexMentionExtractorTest.java
@@ -19,7 +19,6 @@
 
 package org.apache.streams.regex;
 
-
 import com.google.common.collect.Sets;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.pojo.extensions.ExtensionUtil;

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexUrlExtractorTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexUrlExtractorTest.java b/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexUrlExtractorTest.java
index 344bf98..d5d8d9b 100644
--- a/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexUrlExtractorTest.java
+++ b/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexUrlExtractorTest.java
@@ -19,7 +19,6 @@
 
 package org.apache.streams.regex;
 
-
 import com.google.common.collect.Sets;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.pojo.json.Activity;

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexUtilsTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexUtilsTest.java b/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexUtilsTest.java
index fc2b9f6..a156f3a 100644
--- a/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexUtilsTest.java
+++ b/streams-contrib/streams-processor-regex/src/test/java/org/apache/streams/regex/RegexUtilsTest.java
@@ -19,7 +19,6 @@
 
 package org.apache.streams.regex;
 
-
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -33,7 +32,6 @@ import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.is;
 import static org.junit.Assert.assertThat;
 
-
 @RunWith(Parameterized.class)
 public class RegexUtilsTest {
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/api/FacebookPageActivitySerializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/api/FacebookPageActivitySerializer.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/api/FacebookPageActivitySerializer.java
index 7a6648a..1216c38 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/api/FacebookPageActivitySerializer.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/api/FacebookPageActivitySerializer.java
@@ -18,15 +18,17 @@
 
 package org.apache.streams.facebook.api;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.data.ActivitySerializer;
 import org.apache.streams.exceptions.ActivitySerializerException;
+import org.apache.streams.facebook.Page;
 import org.apache.streams.facebook.serializer.FacebookActivityUtil;
 import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.facebook.Page;
 import org.apache.streams.pojo.json.Activity;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import org.apache.commons.lang.NotImplementedException;
+
 import java.util.List;
 
 /**
@@ -35,32 +37,32 @@ import java.util.List;
  */
 public class FacebookPageActivitySerializer implements ActivitySerializer<Page> {
 
-    public static ObjectMapper mapper;
-    static {
-        mapper = StreamsJacksonMapper.getInstance();
-    }
+  public static ObjectMapper mapper;
+  static {
+    mapper = StreamsJacksonMapper.getInstance();
+  }
 
-    @Override
-    public String serializationFormat() {
-        return "facebook_post_json_v1";
-    }
+  @Override
+  public String serializationFormat() {
+    return "facebook_post_json_v1";
+  }
 
-    @Override
-    public Page serialize(Activity deserialized) throws ActivitySerializerException {
-        throw new NotImplementedException("Not currently supported by this deserializer");
-    }
+  @Override
+  public Page serialize(Activity deserialized) throws ActivitySerializerException {
+    throw new NotImplementedException("Not currently supported by this deserializer");
+  }
 
-    @Override
-    public Activity deserialize(Page page) throws ActivitySerializerException {
-        Activity activity = new Activity();
+  @Override
+  public Activity deserialize(Page page) throws ActivitySerializerException {
+    Activity activity = new Activity();
 
-        FacebookActivityUtil.updateActivity(page, activity);
+    FacebookActivityUtil.updateActivity(page, activity);
 
-        return activity;
-    }
+    return activity;
+  }
 
-    @Override
-    public List<Activity> deserializeAll(List<Page> serializedList) {
-        throw new NotImplementedException("Not currently supported by this deserializer");
-    }
+  @Override
+  public List<Activity> deserializeAll(List<Page> serializedList) {
+    throw new NotImplementedException("Not currently supported by this deserializer");
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/api/FacebookPostActivitySerializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/api/FacebookPostActivitySerializer.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/api/FacebookPostActivitySerializer.java
index 4326fb1..306fecc 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/api/FacebookPostActivitySerializer.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/api/FacebookPostActivitySerializer.java
@@ -15,16 +15,19 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.facebook.api;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.data.ActivitySerializer;
 import org.apache.streams.exceptions.ActivitySerializerException;
 import org.apache.streams.facebook.Post;
 import org.apache.streams.facebook.serializer.FacebookActivityUtil;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import org.apache.commons.lang.NotImplementedException;
 import org.joda.time.format.DateTimeFormat;
 import org.joda.time.format.DateTimeFormatter;
 import org.joda.time.format.ISODateTimeFormat;
@@ -33,37 +36,34 @@ import java.util.List;
 
 public class FacebookPostActivitySerializer implements ActivitySerializer<org.apache.streams.facebook.Post> {
 
-    public static final DateTimeFormatter FACEBOOK_FORMAT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ");
-    public static final DateTimeFormatter ACTIVITY_FORMAT = ISODateTimeFormat.basicDateTime();
+  public static final DateTimeFormatter FACEBOOK_FORMAT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ");
+  public static final DateTimeFormatter ACTIVITY_FORMAT = ISODateTimeFormat.basicDateTime();
 
-    public static final String PROVIDER_NAME = "Facebook";
+  public static final String PROVIDER_NAME = "Facebook";
 
-    public static ObjectMapper mapper;
-    static {
-        mapper = StreamsJacksonMapper.getInstance();
-    }
+  public static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Override
-    public String serializationFormat() {
-        return "facebook_post_json_v1";
-    }
+  @Override
+  public String serializationFormat() {
+    return "facebook_post_json_v1";
+  }
 
-    @Override
-    public Post serialize(Activity deserialized) throws ActivitySerializerException {
-        throw new NotImplementedException("Not currently supported by this deserializer");
-    }
+  @Override
+  public Post serialize(Activity deserialized) throws ActivitySerializerException {
+    throw new NotImplementedException("Not currently supported by this deserializer");
+  }
 
-    @Override
-    public Activity deserialize(Post post) throws ActivitySerializerException {
-        Activity activity = new Activity();
+  @Override
+  public Activity deserialize(Post post) throws ActivitySerializerException {
+    Activity activity = new Activity();
 
-        FacebookActivityUtil.updateActivity(post, activity);
+    FacebookActivityUtil.updateActivity(post, activity);
 
-        return activity;
-    }
+    return activity;
+  }
 
-    @Override
-    public List<Activity> deserializeAll(List<Post> serializedList) {
-        throw new NotImplementedException("Not currently supported by this deserializer");
-    }
+  @Override
+  public List<Activity> deserializeAll(List<Post> serializedList) {
+    throw new NotImplementedException("Not currently supported by this deserializer");
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/processor/FacebookTypeConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/processor/FacebookTypeConverter.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/processor/FacebookTypeConverter.java
index 762b6c0..92cf333 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/processor/FacebookTypeConverter.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/processor/FacebookTypeConverter.java
@@ -18,12 +18,6 @@
 
 package org.apache.streams.facebook.processor;
 
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.exceptions.ActivitySerializerException;
@@ -34,6 +28,14 @@ import org.apache.streams.facebook.api.FacebookPostActivitySerializer;
 import org.apache.streams.facebook.provider.FacebookEventClassifier;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.collect.Lists;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -41,172 +43,187 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Queue;
 
+/**
+ * FacebookTypeConverter converts facebook data to activity streams types.
+ */
 public class FacebookTypeConverter implements StreamsProcessor {
 
-    public final static String STREAMS_ID = "FacebookTypeConverter";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(FacebookTypeConverter.class);
-
-    private ObjectMapper mapper;
-
-    private Queue<StreamsDatum> inQueue;
-    private Queue<StreamsDatum> outQueue;
-
-    private Class inClass;
-    private Class outClass;
-
-    private FacebookPostActivitySerializer facebookPostActivitySerializer;
-    private FacebookPageActivitySerializer facebookPageActivitySerializer;
-
-    private int count = 0;
-
-    public final static String TERMINATE = new String("TERMINATE");
-
-    public FacebookTypeConverter(Class inClass, Class outClass) {
-        this.inClass = inClass;
-        this.outClass = outClass;
-    }
-
-    public Queue<StreamsDatum> getProcessorOutputQueue() {
-        return outQueue;
-    }
-
-    public void setProcessorInputQueue(Queue<StreamsDatum> inputQueue) {
-        inQueue = inputQueue;
+  public static final String STREAMS_ID = "FacebookTypeConverter";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookTypeConverter.class);
+
+  private ObjectMapper mapper;
+
+  private Queue<StreamsDatum> inQueue;
+  private Queue<StreamsDatum> outQueue;
+
+  private Class inClass;
+  private Class outClass;
+
+  private FacebookPostActivitySerializer facebookPostActivitySerializer;
+  private FacebookPageActivitySerializer facebookPageActivitySerializer;
+
+  private int count = 0;
+
+  public static final String TERMINATE = new String("TERMINATE");
+
+  public FacebookTypeConverter(Class inClass, Class outClass) {
+    this.inClass = inClass;
+    this.outClass = outClass;
+  }
+
+  public Queue<StreamsDatum> getProcessorOutputQueue() {
+    return outQueue;
+  }
+
+  public void setProcessorInputQueue(Queue<StreamsDatum> inputQueue) {
+    inQueue = inputQueue;
+  }
+
+  /**
+   * convert.
+   * @param event event
+   * @param inClass inClass
+   * @param outClass outClass
+   * @return Object
+   * @throws ActivitySerializerException ActivitySerializerException
+   * @throws JsonProcessingException JsonProcessingException
+   */
+  public Object convert(ObjectNode event, Class inClass, Class outClass) throws ActivitySerializerException, JsonProcessingException {
+
+    Object result = null;
+
+    if ( outClass.equals( Activity.class )) {
+      LOGGER.debug("ACTIVITY");
+      if (inClass.equals(Post.class)) {
+        LOGGER.debug("POST");
+        result = facebookPostActivitySerializer.deserialize(mapper.convertValue(event, Post.class));
+      } else if (inClass.equals(Page.class)) {
+        LOGGER.debug("PAGE");
+        result = facebookPageActivitySerializer.deserialize(mapper.convertValue(event, Page.class));
+      }
+    } else if ( outClass.equals( Post.class )) {
+      LOGGER.debug("POST");
+      result = mapper.convertValue(event, Post.class);
+    } else if ( outClass.equals(Page.class)) {
+      LOGGER.debug("PAGE");
+      result = mapper.convertValue(event, Page.class);
+    } else if ( outClass.equals( ObjectNode.class )) {
+      LOGGER.debug("OBJECTNODE");
+      result = mapper.convertValue(event, ObjectNode.class);
     }
 
-    public Object convert(ObjectNode event, Class inClass, Class outClass) throws ActivitySerializerException, JsonProcessingException {
-
-        Object result = null;
-
-        if( outClass.equals( Activity.class )) {
-            LOGGER.debug("ACTIVITY");
-            if(inClass.equals(Post.class)) {
-                LOGGER.debug("POST");
-                result = facebookPostActivitySerializer.deserialize(mapper.convertValue(event, Post.class));
-            } else if(inClass.equals(Page.class)) {
-                LOGGER.debug("PAGE");
-                result = facebookPageActivitySerializer.deserialize(mapper.convertValue(event, Page.class));
-            }
-        } else if( outClass.equals( Post.class )) {
-            LOGGER.debug("POST");
-            result = mapper.convertValue(event, Post.class);
-        } else if( outClass.equals(Page.class)) {
-            LOGGER.debug("PAGE");
-            result = mapper.convertValue(event, Page.class);
-        } else if( outClass.equals( ObjectNode.class )) {
-            LOGGER.debug("OBJECTNODE");
-            result = mapper.convertValue(event, ObjectNode.class);
-        }
-
-        // no supported conversion were applied
-        if( result != null ) {
-            count ++;
-            return result;
-        }
-
-        LOGGER.debug("CONVERT FAILED");
-
-        return null;
+    // no supported conversion were applied
+    if ( result != null ) {
+      count ++;
+      return result;
     }
 
-    public boolean validate(Object document, Class klass) {
-
-        // TODO
-        return true;
+    LOGGER.debug("CONVERT FAILED");
+
+    return null;
+  }
+
+  // TODO: use standard validation
+  public boolean validate(Object document, Class klass) {
+    return true;
+  }
+
+  // TODO: replace with standard validation
+  public boolean isValidJSON(final String json) {
+    boolean valid = false;
+    try {
+      final JsonParser parser = new ObjectMapper().getJsonFactory()
+          .createJsonParser(json);
+      while (parser.nextToken() != null) {
+      }
+      valid = true;
+    } catch (JsonParseException jpe) {
+      LOGGER.warn("validate: {}", jpe);
+    } catch (IOException ioe) {
+      LOGGER.warn("validate: {}", ioe);
     }
 
-    public boolean isValidJSON(final String json) {
-        boolean valid = false;
-        try {
-            final JsonParser parser = new ObjectMapper().getJsonFactory()
-                    .createJsonParser(json);
-            while (parser.nextToken() != null) {
-            }
-            valid = true;
-        } catch (JsonParseException jpe) {
-            LOGGER.warn("validate: {}", jpe);
-        } catch (IOException ioe) {
-            LOGGER.warn("validate: {}", ioe);
-        }
-
-        return valid;
-    }
+    return valid;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
 
-        StreamsDatum result = null;
+    StreamsDatum result = null;
 
-        try {
-            Object item = entry.getDocument();
-            ObjectNode node;
+    try {
+      Object item = entry.getDocument();
+      ObjectNode node;
 
-            LOGGER.debug("{} processing {}", STREAMS_ID, item.getClass());
+      LOGGER.debug("{} processing {}", STREAMS_ID, item.getClass());
 
-            if( item instanceof String ) {
+      if ( item instanceof String ) {
 
-                // if the target is string, just pass-through
-                if( String.class.equals(outClass)) {
-                    result = entry;
-                }
-                else {
-                    // first check for valid json
-                    node = (ObjectNode)mapper.readTree((String)item);
+        // if the target is string, just pass-through
+        if ( String.class.equals(outClass)) {
+          result = entry;
+        } else {
+          // first check for valid json
+          node = (ObjectNode)mapper.readTree((String)item);
 
-                    // since data is coming from outside provider, we don't know what type the events are
-                    // for now we'll assume post
-                    Class inClass = FacebookEventClassifier.detectClass((String) item);
+          // since data is coming from outside provider, we don't know what type the events are
+          // for now we'll assume post
+          Class inClass = FacebookEventClassifier.detectClass((String) item);
 
-                    Object out = convert(node, inClass, outClass);
+          Object out = convert(node, inClass, outClass);
 
-                    if( out != null && validate(out, outClass))
-                        result = new StreamsDatum(out);
-                }
+          if ( out != null && validate(out, outClass)) {
+            result = new StreamsDatum(out);
+          }
+        }
 
-            } else if( item instanceof ObjectNode) {
+      } else if ( item instanceof ObjectNode) {
 
-                // first check for valid json
-                node = (ObjectNode)mapper.valueToTree(item);
+        // first check for valid json
+        node = (ObjectNode)mapper.valueToTree(item);
 
-                Class inClass = FacebookEventClassifier.detectClass(mapper.writeValueAsString(item));
+        Class inClass = FacebookEventClassifier.detectClass(mapper.writeValueAsString(item));
 
-                Object out = convert(node, inClass, outClass);
+        Object out = convert(node, inClass, outClass);
 
-                if( out != null && validate(out, outClass))
-                    result = new StreamsDatum(out);
-            } else if(item instanceof Post || item instanceof Page) {
-                Object out = convert(mapper.convertValue(item, ObjectNode.class), inClass, outClass);
+        if ( out != null && validate(out, outClass)) {
+          result = new StreamsDatum(out);
+        }
+      } else if (item instanceof Post || item instanceof Page) {
+        Object out = convert(mapper.convertValue(item, ObjectNode.class), inClass, outClass);
 
-                if( out != null && validate(out, outClass))
-                    result = new StreamsDatum(out);
-            }
-        }  catch (Exception e) {
-            LOGGER.error("Exception switching types : {}", e);
-            if(e instanceof InterruptedException) {
-                Thread.currentThread().interrupt();
-            }
+        if ( out != null && validate(out, outClass)) {
+          result = new StreamsDatum(out);
         }
+      }
+    } catch (Exception ex) {
+      LOGGER.error("Exception switching types : {}", ex);
+      if (ex instanceof InterruptedException) {
+        Thread.currentThread().interrupt();
+      }
+    }
 
-        if( result != null )
-            return Lists.newArrayList(result);
-        else
-            return Lists.newArrayList();
+    if ( result != null ) {
+      return Lists.newArrayList(result);
+    } else {
+      return Lists.newArrayList();
     }
+  }
 
-    @Override
-    public void prepare(Object o) {
-        mapper = StreamsJacksonMapper.getInstance();
+  @Override
+  public void prepare(Object configurationObject) {
+    mapper = StreamsJacksonMapper.getInstance();
 
-        facebookPageActivitySerializer = new FacebookPageActivitySerializer();
-        facebookPostActivitySerializer = new FacebookPostActivitySerializer();
-    }
+    facebookPageActivitySerializer = new FacebookPageActivitySerializer();
+    facebookPostActivitySerializer = new FacebookPostActivitySerializer();
+  }
 
-    @Override
-    public void cleanUp() {}
+  @Override
+  public void cleanUp() {}
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookDataCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookDataCollector.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookDataCollector.java
index 33ee9dc..617bfab 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookDataCollector.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookDataCollector.java
@@ -15,130 +15,139 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.facebook.provider;
 
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Strings;
-import facebook4j.Facebook;
-import facebook4j.FacebookFactory;
-import facebook4j.conf.ConfigurationBuilder;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.facebook.FacebookConfiguration;
 import org.apache.streams.facebook.IdConfig;
 import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
 import org.apache.streams.util.api.requests.backoff.impl.ExponentialBackOffStrategy;
 import org.apache.streams.util.oauth.tokens.tokenmanager.SimpleTokenManager;
-import org.apache.streams.util.oauth.tokens.tokenmanager.impl.BasicTokenManger;
+import org.apache.streams.util.oauth.tokens.tokenmanager.impl.BasicTokenManager;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Strings;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.atomic.AtomicBoolean;
 
+import facebook4j.Facebook;
+import facebook4j.FacebookFactory;
+import facebook4j.conf.ConfigurationBuilder;
+
 /**
  * Abstract data collector for Facebook.  Iterates over ids and queues data to be output
  * by a {@link org.apache.streams.core.StreamsProvider}
  */
 public abstract class FacebookDataCollector implements Runnable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(FacebookDataCollector.class);
-    private static final String READ_ONLY = "read_streams";
-
-    @VisibleForTesting
-    protected AtomicBoolean isComplete;
-    protected BackOffStrategy backOff;
-
-    private FacebookConfiguration config;
-    private BlockingQueue<StreamsDatum> queue;
-    private SimpleTokenManager<String> authTokens;
-
-
-    public FacebookDataCollector(FacebookConfiguration config, BlockingQueue<StreamsDatum> queue) {
-        this.config = config;
-        this.queue = queue;
-        this.isComplete = new AtomicBoolean(false);
-        this.backOff = new ExponentialBackOffStrategy(5);
-        this.authTokens = new BasicTokenManger<String>();
-        if(config.getUserAccessTokens() != null) {
-            for(String token : config.getUserAccessTokens()) {
-                this.authTokens.addTokenToPool(token);
-            }
-        }
-    }
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookDataCollector.class);
+  private static final String READ_ONLY = "read_streams";
 
-    /**
-     * Returns true when the collector has finished querying facebook and has queued all data
-     * for the provider
-     * @return
-     */
-    public boolean isComplete(){
-        return this.isComplete.get();
-    }
+  @VisibleForTesting
+  protected AtomicBoolean isComplete;
+  protected BackOffStrategy backOff;
+
+  private FacebookConfiguration config;
+  private BlockingQueue<StreamsDatum> queue;
+  private SimpleTokenManager<String> authTokens;
 
-    /**
-     * Queues facebook data
-     * @param data
-     * @param id
-     */
-    protected void outputData(Object data, String id) {
-        try {
-            this.queue.put(new StreamsDatum(data, id));
-        } catch (InterruptedException ie) {
-            Thread.currentThread().interrupt();
-        }
+  /**
+   * FacebookDataCollector constructor.
+   * @param config config
+   * @param queue queue
+   */
+  public FacebookDataCollector(FacebookConfiguration config, BlockingQueue<StreamsDatum> queue) {
+    this.config = config;
+    this.queue = queue;
+    this.isComplete = new AtomicBoolean(false);
+    this.backOff = new ExponentialBackOffStrategy(5);
+    this.authTokens = new BasicTokenManager<String>();
+    if (config.getUserAccessTokens() != null) {
+      for (String token : config.getUserAccessTokens()) {
+        this.authTokens.addTokenToPool(token);
+      }
     }
+  }
 
-    /**
-     * Gets a Facebook client.  If multiple authenticated users for this app are available
-     * it will rotate through the users oauth credentials
-     * @return
-     */
-    protected Facebook getNextFacebookClient() {
-            ConfigurationBuilder cb = new ConfigurationBuilder();
-            cb.setDebugEnabled(true);
-            cb.setOAuthPermissions(READ_ONLY);
-            cb.setOAuthAppId(this.config.getOauth().getAppId());
-            cb.setOAuthAppSecret(this.config.getOauth().getAppSecret());
-            if(this.authTokens.numAvailableTokens() > 0)
-                cb.setOAuthAccessToken(this.authTokens.getNextAvailableToken());
-            else {
-                cb.setOAuthAccessToken(this.config.getOauth().getAppAccessToken());
-                LOGGER.debug("appAccessToken : {}", this.config.getOauth().getAppAccessToken());
-            }
-            cb.setJSONStoreEnabled(true);
-            if(!Strings.isNullOrEmpty(config.getVersion()))
-                cb.setRestBaseURL("https://graph.facebook.com/" + config.getVersion() + "/");
-            LOGGER.debug("appId : {}", this.config.getOauth().getAppId());
-            LOGGER.debug("appSecret: {}", this.config.getOauth().getAppSecret());
-            FacebookFactory ff = new FacebookFactory(cb.build());
-            return  ff.getInstance();
+  /**
+   * Returns true when the collector has finished querying facebook and has queued all data
+   * for the provider.
+   * @return isComplete
+   */
+  public boolean isComplete() {
+    return this.isComplete.get();
+  }
+
+  /**
+   * Queues facebook data.
+   * @param data data
+   * @param id id
+   */
+  protected void outputData(Object data, String id) {
+    try {
+      this.queue.put(new StreamsDatum(data, id));
+    } catch (InterruptedException ie) {
+      Thread.currentThread().interrupt();
     }
+  }
 
-    /**
-     * Queries facebook and queues the resulting data
-     * @param id
-     * @throws Exception
-     */
-    protected abstract void getData(IdConfig id) throws Exception;
-
-
-    @Override
-    public void run() {
-        for( IdConfig id : this.config.getIds()) {
-            try {
-                getData(id);
-            } catch (InterruptedException ie) {
-                Thread.currentThread().interrupt();
-            } catch (Exception e) {
-                LOGGER.error("Caught Exception while trying to poll data for page : {}", id);
-                LOGGER.error("Exception while getting page feed data: {}", e);
-            }
-        }
-        this.isComplete.set(true);
+  /**
+   * Gets a Facebook client.  If multiple authenticated users for this app are available
+   * it will rotate through the users oauth credentials
+   * @return client
+   */
+  protected Facebook getNextFacebookClient() {
+    ConfigurationBuilder cb = new ConfigurationBuilder();
+    cb.setDebugEnabled(true);
+    cb.setOAuthPermissions(READ_ONLY);
+    cb.setOAuthAppId(this.config.getOauth().getAppId());
+    cb.setOAuthAppSecret(this.config.getOauth().getAppSecret());
+    if (this.authTokens.numAvailableTokens() > 0) {
+      cb.setOAuthAccessToken(this.authTokens.getNextAvailableToken());
+    } else {
+      cb.setOAuthAccessToken(this.config.getOauth().getAppAccessToken());
+      LOGGER.debug("appAccessToken : {}", this.config.getOauth().getAppAccessToken());
+    }
+    cb.setJSONStoreEnabled(true);
+    if (!Strings.isNullOrEmpty(config.getVersion())) {
+      cb.setRestBaseURL("https://graph.facebook.com/" + config.getVersion() + "/");
     }
+    LOGGER.debug("appId : {}", this.config.getOauth().getAppId());
+    LOGGER.debug("appSecret: {}", this.config.getOauth().getAppSecret());
+    FacebookFactory ff = new FacebookFactory(cb.build());
+    return  ff.getInstance();
+  }
+
+  /**
+   * Queries facebook and queues the resulting data.
+   * @param id id
+   * @throws Exception Exception
+   */
+  protected abstract void getData(IdConfig id) throws Exception;
 
-    @VisibleForTesting
-    protected BlockingQueue<StreamsDatum> getQueue() {
-        return queue;
+
+  @Override
+  public void run() {
+    for ( IdConfig id : this.config.getIds()) {
+      try {
+        getData(id);
+      } catch (InterruptedException ie) {
+        Thread.currentThread().interrupt();
+      } catch (Exception ex) {
+        LOGGER.error("Caught Exception while trying to poll data for page : {}", id);
+        LOGGER.error("Exception while getting page feed data: {}", ex);
+      }
     }
+    this.isComplete.set(true);
+  }
+
+  @VisibleForTesting
+  protected BlockingQueue<StreamsDatum> getQueue() {
+    return queue;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookEventClassifier.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookEventClassifier.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookEventClassifier.java
index 16e2a25..47c2afb 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookEventClassifier.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookEventClassifier.java
@@ -18,40 +18,50 @@
 
 package org.apache.streams.facebook.provider;
 
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
-
-import java.io.IOException;
-
 import org.apache.streams.facebook.Page;
 import org.apache.streams.facebook.Post;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Preconditions;
+
+import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+
+/**
+ * FacebookEventClassifier classifies facebook events.
+ */
 public class FacebookEventClassifier {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(FacebookEventClassifier.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookEventClassifier.class);
 
-    public static Class detectClass( String json ) {
+  /**
+   * detectClass from json string.
+   * @param json json string
+   * @return detected Class
+   */
+  public static Class detectClass( String json ) {
 
-        Preconditions.checkNotNull(json);
-        Preconditions.checkArgument(StringUtils.isNotEmpty(json));
+    Preconditions.checkNotNull(json);
+    Preconditions.checkArgument(StringUtils.isNotEmpty(json));
 
-        ObjectNode objectNode;
-        try {
-            objectNode = (ObjectNode) StreamsJacksonMapper.getInstance().readTree(json);
-        } catch (IOException e) {
-            LOGGER.error("Exception while trying to detect class: {}", e.getMessage());
-            return null;
-        }
+    ObjectNode objectNode;
+    try {
+      objectNode = (ObjectNode) StreamsJacksonMapper.getInstance().readTree(json);
+    } catch (IOException ex) {
+      LOGGER.error("Exception while trying to detect class: {}", ex.getMessage());
+      return null;
+    }
 
-        if( objectNode.findValue("about") != null)
-            return Page.class;
-        else if( objectNode.findValue("statusType") != null )
-            return Post.class;
-        else
-            return Post.class;
+    if ( objectNode.findValue("about") != null) {
+      return Page.class;
+    } else if ( objectNode.findValue("statusType") != null ) {
+      return Post.class;
+    } else {
+      return Post.class;
     }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookFriendFeedProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookFriendFeedProvider.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookFriendFeedProvider.java
index 231ee4f..3253479 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookFriendFeedProvider.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookFriendFeedProvider.java
@@ -18,15 +18,6 @@
 
 package org.apache.streams.facebook.provider;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Queues;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigRenderOptions;
-import facebook4j.*;
-import facebook4j.conf.ConfigurationBuilder;
-import facebook4j.json.DataObjectFactory;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.DatumStatusCounter;
 import org.apache.streams.core.StreamsDatum;
@@ -36,253 +27,295 @@ import org.apache.streams.facebook.FacebookUserInformationConfiguration;
 import org.apache.streams.facebook.FacebookUserstreamConfiguration;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.util.ComponentUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Queues;
+import com.google.common.util.concurrent.MoreExecutors;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigRenderOptions;
+
+import org.apache.commons.lang.NotImplementedException;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.NotImplementedException;
 
 import java.io.IOException;
 import java.io.Serializable;
 import java.math.BigInteger;
 import java.util.Iterator;
 import java.util.Queue;
-import java.util.concurrent.*;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
-public class FacebookFriendFeedProvider implements StreamsProvider, Serializable
-{
-
-    public static final String STREAMS_ID = "FacebookFriendFeedProvider";
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(FacebookFriendFeedProvider.class);
-
-    private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+import facebook4j.Facebook;
+import facebook4j.FacebookException;
+import facebook4j.FacebookFactory;
+import facebook4j.Friend;
+import facebook4j.Paging;
+import facebook4j.Post;
+import facebook4j.ResponseList;
+import facebook4j.conf.ConfigurationBuilder;
+import facebook4j.json.DataObjectFactory;
 
-    private static final String ALL_PERMISSIONS = "ads_management,ads_read,create_event,create_note,email,export_stream,friends_about_me,friends_actions.books,friends_actions.music,friends_actions.news,friends_actions.video,friends_activities,friends_birthday,friends_education_history,friends_events,friends_games_activity,friends_groups,friends_hometown,friends_interests,friends_likes,friends_location,friends_notes,friends_online_presence,friends_photo_video_tags,friends_photos,friends_questions,friends_relationship_details,friends_relationships,friends_religion_politics,friends_status,friends_subscriptions,friends_videos,friends_website,friends_work_history,manage_friendlists,manage_notifications,manage_pages,photo_upload,publish_actions,publish_stream,read_friendlists,read_insights,read_mailbox,read_page_mailboxes,read_requests,read_stream,rsvp_event,share_item,sms,status_update,user_about_me,user_actions.books,user_actions.music,user_actions.news,user_actions.video,user_activitie
 s,user_birthday,user_education_history,user_events,user_friends,user_games_activity,user_groups,user_hometown,user_interests,user_likes,user_location,user_notes,user_online_presence,user_photo_video_tags,user_photos,user_questions,user_relationship_details,user_relationships,user_religion_politics,user_status,user_subscriptions,user_videos,user_website,user_work_history,video_upload,xmpp_login";
-    private FacebookUserstreamConfiguration configuration;
+public class FacebookFriendFeedProvider implements StreamsProvider, Serializable {
 
-    private Class klass;
-    protected final ReadWriteLock lock = new ReentrantReadWriteLock();
+  public static final String STREAMS_ID = "FacebookFriendFeedProvider";
 
-    protected volatile Queue<StreamsDatum> providerQueue = new LinkedBlockingQueue<StreamsDatum>();
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookFriendFeedProvider.class);
 
-    public FacebookUserstreamConfiguration getConfig()              { return configuration; }
+  private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    public void setConfig(FacebookUserstreamConfiguration config)   { this.configuration = config; }
+  private static final String ALL_PERMISSIONS = "ads_management,ads_read,create_event,create_note,email,export_stream,friends_about_me,friends_actions.books,friends_actions.music,friends_actions.news,friends_actions.video,friends_activities,friends_birthday,friends_education_history,friends_events,friends_games_activity,friends_groups,friends_hometown,friends_interests,friends_likes,friends_location,friends_notes,friends_online_presence,friends_photo_video_tags,friends_photos,friends_questions,friends_relationship_details,friends_relationships,friends_religion_politics,friends_status,friends_subscriptions,friends_videos,friends_website,friends_work_history,manage_friendlists,manage_notifications,manage_pages,photo_upload,publish_actions,publish_stream,read_friendlists,read_insights,read_mailbox,read_page_mailboxes,read_requests,read_stream,rsvp_event,share_item,sms,status_update,user_about_me,user_actions.books,user_actions.music,user_actions.news,user_actions.video,user_activities,
 user_birthday,user_education_history,user_events,user_friends,user_games_activity,user_groups,user_hometown,user_interests,user_likes,user_location,user_notes,user_online_presence,user_photo_video_tags,user_photos,user_questions,user_relationship_details,user_relationships,user_religion_politics,user_status,user_subscriptions,user_videos,user_website,user_work_history,video_upload,xmpp_login";
+  private FacebookUserstreamConfiguration configuration;
 
-    protected Iterator<String[]> idsBatches;
+  private Class klass;
+  protected final ReadWriteLock lock = new ReentrantReadWriteLock();
 
-    protected ExecutorService executor;
+  protected volatile Queue<StreamsDatum> providerQueue = new LinkedBlockingQueue<StreamsDatum>();
 
-    protected DateTime start;
-    protected DateTime end;
+  public FacebookUserstreamConfiguration getConfig() {
+    return configuration;
+  }
 
-    protected final AtomicBoolean running = new AtomicBoolean();
+  public void setConfig(FacebookUserstreamConfiguration config) {
+    this.configuration = config;
+  }
 
-    private DatumStatusCounter countersCurrent = new DatumStatusCounter();
-    private DatumStatusCounter countersTotal = new DatumStatusCounter();
+  protected Iterator<String[]> idsBatches;
 
-    private static ExecutorService newFixedThreadPoolWithQueueSize(int nThreads, int queueSize) {
-        return new ThreadPoolExecutor(nThreads, nThreads,
-                5000L, TimeUnit.MILLISECONDS,
-                new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
-    }
+  protected ExecutorService executor;
 
-    public FacebookFriendFeedProvider() {
-        Config config = StreamsConfigurator.config.getConfig("facebook");
-        FacebookUserInformationConfiguration configuration;
-        try {
-            configuration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
-        } catch (IOException e) {
-            e.printStackTrace();
-            return;
-        }
-    }
+  protected DateTime start;
+  protected DateTime end;
 
-    public FacebookFriendFeedProvider(FacebookUserstreamConfiguration config) {
-        this.configuration = config;
-    }
+  protected final AtomicBoolean running = new AtomicBoolean();
 
-    public FacebookFriendFeedProvider(Class klass) {
-        Config config = StreamsConfigurator.config.getConfig("facebook");
-        FacebookUserInformationConfiguration configuration;
-        try {
-            configuration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
-        } catch (IOException e) {
-            e.printStackTrace();
-            return;
-        }
-        this.klass = klass;
-    }
+  private DatumStatusCounter countersCurrent = new DatumStatusCounter();
+  private DatumStatusCounter countersTotal = new DatumStatusCounter();
 
-    public FacebookFriendFeedProvider(FacebookUserstreamConfiguration config, Class klass) {
-        this.configuration = config;
-        this.klass = klass;
-    }
+  private static ExecutorService newFixedThreadPoolWithQueueSize(int numThreads, int queueSize) {
+    return new ThreadPoolExecutor(numThreads, numThreads,
+        5000L, TimeUnit.MILLISECONDS,
+        new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
+  }
 
-    public Queue<StreamsDatum> getProviderQueue() {
-        return this.providerQueue;
+  /**
+   * FacebookFriendFeedProvider constructor - resolves FacebookUserInformationConfiguration from JVM 'facebook'.
+   */
+  public FacebookFriendFeedProvider() {
+    Config config = StreamsConfigurator.config.getConfig("facebook");
+    FacebookUserInformationConfiguration configuration;
+    try {
+      configuration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
+    } catch (IOException ex) {
+      ex.printStackTrace();
+      return;
     }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
+  }
+
+  /**
+   * FacebookFriendFeedProvider constructor - uses supplied FacebookUserInformationConfiguration.
+   */
+  public FacebookFriendFeedProvider(FacebookUserstreamConfiguration config) {
+    this.configuration = config;
+  }
+
+  /**
+   * FacebookFriendFeedProvider constructor - output supplied Class.
+   * @param klass Class
+   */
+  public FacebookFriendFeedProvider(Class klass) {
+    Config config = StreamsConfigurator.config.getConfig("facebook");
+    FacebookUserInformationConfiguration configuration;
+    try {
+      configuration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
+    } catch (IOException ex) {
+      ex.printStackTrace();
+      return;
     }
-
-    @Override
-    public void startStream() {
-        shutdownAndAwaitTermination(executor);
-        running.set(true);
+    this.klass = klass;
+  }
+
+  public FacebookFriendFeedProvider(FacebookUserstreamConfiguration config, Class klass) {
+    this.configuration = config;
+    this.klass = klass;
+  }
+
+  public Queue<StreamsDatum> getProviderQueue() {
+    return this.providerQueue;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void startStream() {
+    shutdownAndAwaitTermination(executor);
+    running.set(true);
+  }
+
+  @Override
+  public StreamsResultSet readCurrent() {
+
+    StreamsResultSet current;
+
+    synchronized (FacebookUserstreamProvider.class) {
+      current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(providerQueue));
+      current.setCounter(new DatumStatusCounter());
+      current.getCounter().add(countersCurrent);
+      countersTotal.add(countersCurrent);
+      countersCurrent = new DatumStatusCounter();
+      providerQueue.clear();
     }
 
-    public StreamsResultSet readCurrent() {
-
-        StreamsResultSet current;
-
-        synchronized (FacebookUserstreamProvider.class) {
-            current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(providerQueue));
-            current.setCounter(new DatumStatusCounter());
-            current.getCounter().add(countersCurrent);
-            countersTotal.add(countersCurrent);
-            countersCurrent = new DatumStatusCounter();
-            providerQueue.clear();
+    return current;
+
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    LOGGER.debug("{} readNew", STREAMS_ID);
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    LOGGER.debug("{} readRange", STREAMS_ID);
+    this.start = start;
+    this.end = end;
+    readCurrent();
+    StreamsResultSet result = (StreamsResultSet)providerQueue.iterator();
+    return result;
+  }
+
+  @Override
+  public boolean isRunning() {
+    return running.get();
+  }
+
+  void shutdownAndAwaitTermination(ExecutorService pool) {
+    pool.shutdown(); // Disable new tasks from being submitted
+    try {
+      // Wait a while for existing tasks to terminate
+      if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+        pool.shutdownNow(); // Cancel currently executing tasks
+        // Wait a while for tasks to respond to being cancelled
+        if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+          System.err.println("Pool did not terminate");
         }
+      }
+    } catch (InterruptedException ie) {
+      // (Re-)Cancel if current thread also interrupted
+      pool.shutdownNow();
+      // Preserve interrupt status
+      Thread.currentThread().interrupt();
+    }
+  }
 
-        return current;
+  @Override
+  public void prepare(Object configurationObject) {
 
-    }
+    executor = MoreExecutors.listeningDecorator(newFixedThreadPoolWithQueueSize(5, 20));
 
-    public StreamsResultSet readNew(BigInteger sequence) {
-        LOGGER.debug("{} readNew", STREAMS_ID);
-        throw new NotImplementedException();
-    }
+    Preconditions.checkNotNull(providerQueue);
+    Preconditions.checkNotNull(this.klass);
+    Preconditions.checkNotNull(configuration.getOauth().getAppId());
+    Preconditions.checkNotNull(configuration.getOauth().getAppSecret());
+    Preconditions.checkNotNull(configuration.getOauth().getUserAccessToken());
 
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        LOGGER.debug("{} readRange", STREAMS_ID);
-        this.start = start;
-        this.end = end;
-        readCurrent();
-        StreamsResultSet result = (StreamsResultSet)providerQueue.iterator();
-        return result;
-    }
+    Facebook client = getFacebookClient();
 
-    @Override
-    public boolean isRunning() {
-        return running.get();
-    }
+    try {
+      ResponseList<Friend> friendResponseList = client.friends().getFriends();
+      Paging<Friend> friendPaging;
+      do {
 
-    void shutdownAndAwaitTermination(ExecutorService pool) {
-        pool.shutdown(); // Disable new tasks from being submitted
-        try {
-            // Wait a while for existing tasks to terminate
-            if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
-                pool.shutdownNow(); // Cancel currently executing tasks
-                // Wait a while for tasks to respond to being cancelled
-                if (!pool.awaitTermination(10, TimeUnit.SECONDS))
-                    System.err.println("Pool did not terminate");
-            }
-        } catch (InterruptedException ie) {
-            // (Re-)Cancel if current thread also interrupted
-            pool.shutdownNow();
-            // Preserve interrupt status
-            Thread.currentThread().interrupt();
+        for ( Friend friend : friendResponseList ) {
+          executor.submit(new FacebookFriendFeedTask(this, friend.getId()));
         }
+        friendPaging = friendResponseList.getPaging();
+        friendResponseList = client.fetchNext(friendPaging);
+      }
+      while ( friendPaging != null
+              &&
+              friendResponseList != null );
+    } catch (FacebookException ex) {
+      ex.printStackTrace();
     }
 
-    @Override
-    public void prepare(Object o) {
+  }
 
-        executor = MoreExecutors.listeningDecorator(newFixedThreadPoolWithQueueSize(5, 20));
+  protected Facebook getFacebookClient() {
 
-        Preconditions.checkNotNull(providerQueue);
-        Preconditions.checkNotNull(this.klass);
-        Preconditions.checkNotNull(configuration.getOauth().getAppId());
-        Preconditions.checkNotNull(configuration.getOauth().getAppSecret());
-        Preconditions.checkNotNull(configuration.getOauth().getUserAccessToken());
+    ConfigurationBuilder cb = new ConfigurationBuilder();
+    cb.setDebugEnabled(true)
+        .setOAuthAppId(configuration.getOauth().getAppId())
+        .setOAuthAppSecret(configuration.getOauth().getAppSecret())
+        .setOAuthAccessToken(configuration.getOauth().getUserAccessToken())
+        .setOAuthPermissions(ALL_PERMISSIONS)
+        .setJSONStoreEnabled(true)
+        .setClientVersion("v1.0");
 
-        Facebook client = getFacebookClient();
+    FacebookFactory ff = new FacebookFactory(cb.build());
+    Facebook facebook = ff.getInstance();
 
-        try {
-            ResponseList<Friend> friendResponseList = client.friends().getFriends();
-            Paging<Friend> friendPaging;
-            do {
+    return facebook;
+  }
 
-                for( Friend friend : friendResponseList ) {
+  @Override
+  public void cleanUp() {
+    shutdownAndAwaitTermination(executor);
+  }
 
-                    executor.submit(new FacebookFriendFeedTask(this, friend.getId()));
-                }
-                friendPaging = friendResponseList.getPaging();
-                friendResponseList = client.fetchNext(friendPaging);
-            } while( friendPaging != null &&
-                    friendResponseList != null );
-        } catch (FacebookException e) {
-            e.printStackTrace();
-        }
+  private class FacebookFriendFeedTask implements Runnable {
 
-    }
+    FacebookFriendFeedProvider provider;
+    Facebook client;
+    String id;
 
-    protected Facebook getFacebookClient()
-    {
-        ConfigurationBuilder cb = new ConfigurationBuilder();
-        cb.setDebugEnabled(true)
-            .setOAuthAppId(configuration.getOauth().getAppId())
-            .setOAuthAppSecret(configuration.getOauth().getAppSecret())
-            .setOAuthAccessToken(configuration.getOauth().getUserAccessToken())
-            .setOAuthPermissions(ALL_PERMISSIONS)
-            .setJSONStoreEnabled(true)
-            .setClientVersion("v1.0");
-
-        FacebookFactory ff = new FacebookFactory(cb.build());
-        Facebook facebook = ff.getInstance();
-
-        return facebook;
+    public FacebookFriendFeedTask(FacebookFriendFeedProvider provider, String id) {
+      this.provider = provider;
+      this.id = id;
     }
 
     @Override
-    public void cleanUp() {
-        shutdownAndAwaitTermination(executor);
-    }
-
-    private class FacebookFriendFeedTask implements Runnable {
-
-        FacebookFriendFeedProvider provider;
-        Facebook client;
-        String id;
-
-        public FacebookFriendFeedTask(FacebookFriendFeedProvider provider, String id) {
-            this.provider = provider;
-            this.id = id;
+    public void run() {
+      client = provider.getFacebookClient();
+      try {
+        ResponseList<Post> postResponseList = client.getFeed(id);
+        Paging<Post> postPaging;
+        do {
+
+          for (Post item : postResponseList) {
+            String json = DataObjectFactory.getRawJSON(item);
+            org.apache.streams.facebook.Post post = mapper.readValue(json, org.apache.streams.facebook.Post.class);
+            try {
+              lock.readLock().lock();
+              ComponentUtils.offerUntilSuccess(new StreamsDatum(post), providerQueue);
+              countersCurrent.incrementAttempt();
+            } finally {
+              lock.readLock().unlock();
+            }
+          }
+          postPaging = postResponseList.getPaging();
+          postResponseList = client.fetchNext(postPaging);
         }
+        while ( postPaging != null
+                &&
+                postResponseList != null );
 
-        @Override
-        public void run() {
-            client = provider.getFacebookClient();
-                try {
-                    ResponseList<Post> postResponseList = client.getFeed(id);
-                    Paging<Post> postPaging;
-                    do {
-
-                        for (Post item : postResponseList) {
-                            String json = DataObjectFactory.getRawJSON(item);
-                            org.apache.streams.facebook.Post post = mapper.readValue(json, org.apache.streams.facebook.Post.class);
-                            try {
-                                lock.readLock().lock();
-                                ComponentUtils.offerUntilSuccess(new StreamsDatum(post), providerQueue);
-                                countersCurrent.incrementAttempt();
-                            } finally {
-                                lock.readLock().unlock();
-                            }
-                        }
-                        postPaging = postResponseList.getPaging();
-                        postResponseList = client.fetchNext(postPaging);
-                    } while( postPaging != null &&
-                            postResponseList != null );
-
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-        }
+      } catch (Exception ex) {
+        ex.printStackTrace();
+      }
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookFriendUpdatesProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookFriendUpdatesProvider.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookFriendUpdatesProvider.java
index cda868e..50ac64a 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookFriendUpdatesProvider.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookFriendUpdatesProvider.java
@@ -18,15 +18,6 @@
 
 package org.apache.streams.facebook.provider;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Sets;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigRenderOptions;
-import facebook4j.*;
-import facebook4j.conf.ConfigurationBuilder;
-import facebook4j.json.DataObjectFactory;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.DatumStatusCounter;
 import org.apache.streams.core.StreamsDatum;
@@ -36,10 +27,18 @@ import org.apache.streams.facebook.FacebookUserInformationConfiguration;
 import org.apache.streams.facebook.FacebookUserstreamConfiguration;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.util.ComponentUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Sets;
+import com.google.common.util.concurrent.MoreExecutors;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigRenderOptions;
+
+import org.apache.commons.lang.NotImplementedException;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.NotImplementedException;
 
 import java.io.IOException;
 import java.io.Serializable;
@@ -47,246 +46,290 @@ import java.math.BigInteger;
 import java.util.Iterator;
 import java.util.Queue;
 import java.util.Set;
-import java.util.concurrent.*;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
-public class FacebookFriendUpdatesProvider implements StreamsProvider, Serializable
-{
-
-    public static final String STREAMS_ID = "FacebookFriendPostsProvider";
+import facebook4j.Facebook;
+import facebook4j.FacebookException;
+import facebook4j.FacebookFactory;
+import facebook4j.Friend;
+import facebook4j.Paging;
+import facebook4j.Post;
+import facebook4j.ResponseList;
+import facebook4j.conf.ConfigurationBuilder;
+import facebook4j.json.DataObjectFactory;
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(FacebookFriendUpdatesProvider.class);
+/**
+ * FacebookFriendUpdatesProvider provides updates from friend feed.
+ */
+public class FacebookFriendUpdatesProvider implements StreamsProvider, Serializable {
 
-    private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  public static final String STREAMS_ID = "FacebookFriendPostsProvider";
 
-    private static final String ALL_PERMISSIONS = "ads_management,ads_read,create_event,create_note,email,export_stream,friends_about_me,friends_actions.books,friends_actions.music,friends_actions.news,friends_actions.video,friends_activities,friends_birthday,friends_education_history,friends_events,friends_games_activity,friends_groups,friends_hometown,friends_interests,friends_likes,friends_location,friends_notes,friends_online_presence,friends_photo_video_tags,friends_photos,friends_questions,friends_relationship_details,friends_relationships,friends_religion_politics,friends_status,friends_subscriptions,friends_videos,friends_website,friends_work_history,manage_friendlists,manage_notifications,manage_pages,photo_upload,publish_actions,publish_stream,read_friendlists,read_insights,read_mailbox,read_page_mailboxes,read_requests,read_stream,rsvp_event,share_item,sms,status_update,user_about_me,user_actions.books,user_actions.music,user_actions.news,user_actions.video,user_activitie
 s,user_birthday,user_education_history,user_events,user_friends,user_games_activity,user_groups,user_hometown,user_interests,user_likes,user_location,user_notes,user_online_presence,user_photo_video_tags,user_photos,user_questions,user_relationship_details,user_relationships,user_religion_politics,user_status,user_subscriptions,user_videos,user_website,user_work_history,video_upload,xmpp_login";
-    private FacebookUserstreamConfiguration configuration;
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookFriendUpdatesProvider.class);
 
-    private Class klass;
-    protected final ReadWriteLock lock = new ReentrantReadWriteLock();
+  private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    protected volatile Queue<StreamsDatum> providerQueue = new LinkedBlockingQueue<StreamsDatum>();
+  private static final String ALL_PERMISSIONS =
+      "ads_management,ads_read,create_event,create_note,email,export_stream,friends_about_me,friends_actions.books,friends_actions.music,friends_actions.news,friends_actions.video,friends_activities,friends_birthday,friends_education_history,friends_events,friends_games_activity,friends_groups,friends_hometown,friends_interests,friends_likes,friends_location,friends_notes,friends_online_presence,friends_photo_video_tags,friends_photos,friends_questions,friends_relationship_details,friends_relationships,friends_religion_politics,friends_status,friends_subscriptions,friends_videos,friends_website,friends_work_history,manage_friendlists,manage_notifications,manage_pages,photo_upload,publish_actions,publish_stream,read_friendlists,read_insights,read_mailbox,read_page_mailboxes,read_requests,read_stream,rsvp_event,share_item,sms,status_update,user_about_me,user_actions.books,user_actions.music,user_actions.news,user_actions.video,user_activities,user_birthday,user_education_history,user_
 events,user_friends,user_games_activity,user_groups,user_hometown,user_interests,user_likes,user_location,user_notes,user_online_presence,user_photo_video_tags,user_photos,user_questions,user_relationship_details,user_relationships,user_religion_politics,user_status,user_subscriptions,user_videos,user_website,user_work_history,video_upload,xmpp_login";
 
-    public FacebookUserstreamConfiguration getConfig()              { return configuration; }
+  private FacebookUserstreamConfiguration configuration;
 
-    public void setConfig(FacebookUserstreamConfiguration config)   { this.configuration = config; }
+  private Class klass;
+  protected final ReadWriteLock lock = new ReentrantReadWriteLock();
 
-    protected Iterator<String[]> idsBatches;
+  protected volatile Queue<StreamsDatum> providerQueue = new LinkedBlockingQueue<StreamsDatum>();
 
-    protected ExecutorService executor;
+  public FacebookUserstreamConfiguration getConfig() {
+    return configuration;
+  }
 
-    protected DateTime start;
-    protected DateTime end;
+  public void setConfig(FacebookUserstreamConfiguration config) {
+    this.configuration = config;
+  }
 
-    protected final AtomicBoolean running = new AtomicBoolean();
+  protected Iterator<String[]> idsBatches;
 
-    private DatumStatusCounter countersCurrent = new DatumStatusCounter();
-    private DatumStatusCounter countersTotal = new DatumStatusCounter();
+  protected ExecutorService executor;
 
-    private static ExecutorService newFixedThreadPoolWithQueueSize(int nThreads, int queueSize) {
-        return new ThreadPoolExecutor(nThreads, nThreads,
-                5000L, TimeUnit.MILLISECONDS,
-                new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
-    }
+  protected DateTime start;
+  protected DateTime end;
 
-    public FacebookFriendUpdatesProvider() {
-        Config config = StreamsConfigurator.config.getConfig("facebook");
-        FacebookUserInformationConfiguration configuration;
-        try {
-            configuration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
-        } catch (IOException e) {
-            e.printStackTrace();
-            return;
-        }
-    }
+  protected final AtomicBoolean running = new AtomicBoolean();
 
-    public FacebookFriendUpdatesProvider(FacebookUserstreamConfiguration config) {
-        this.configuration = config;
-    }
+  private DatumStatusCounter countersCurrent = new DatumStatusCounter();
+  private DatumStatusCounter countersTotal = new DatumStatusCounter();
 
-    public FacebookFriendUpdatesProvider(Class klass) {
-        Config config = StreamsConfigurator.config.getConfig("facebook");
-        FacebookUserInformationConfiguration configuration;
-        try {
-            configuration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
-        } catch (IOException e) {
-            e.printStackTrace();
-            return;
-        }
-        this.klass = klass;
-    }
+  // TODO: factor this out.
+  private static ExecutorService newFixedThreadPoolWithQueueSize(int numThreads, int queueSize) {
+    return new ThreadPoolExecutor(numThreads, numThreads,
+        5000L, TimeUnit.MILLISECONDS,
+        new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
+  }
 
-    public FacebookFriendUpdatesProvider(FacebookUserstreamConfiguration config, Class klass) {
-        this.configuration = config;
-        this.klass = klass;
+  /**
+   * FacebookFriendUpdatesProvider constructor - resolves FacebookUserInformationConfiguration from JVM 'facebook'.
+   */
+  public FacebookFriendUpdatesProvider() {
+    Config config = StreamsConfigurator.config.getConfig("facebook");
+    FacebookUserInformationConfiguration configuration;
+    try {
+      configuration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
+    } catch (IOException ex) {
+      ex.printStackTrace();
+      return;
     }
-
-    public Queue<StreamsDatum> getProviderQueue() {
-        return this.providerQueue;
+  }
+
+  /**
+   * FacebookFriendUpdatesProvider constructor - uses supplied FacebookUserstreamConfiguration.
+   */
+  public FacebookFriendUpdatesProvider(FacebookUserstreamConfiguration config) {
+    this.configuration = config;
+  }
+
+  /**
+   * FacebookFriendUpdatesProvider constructor.
+   * uses supplied output Class.
+   * resolves FacebookUserInformationConfiguration from JVM 'facebook.
+   */
+  public FacebookFriendUpdatesProvider(Class klass) {
+    Config config = StreamsConfigurator.config.getConfig("facebook");
+    FacebookUserInformationConfiguration configuration;
+    try {
+      configuration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
+    } catch (IOException ex) {
+      ex.printStackTrace();
+      return;
     }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    @Override
-    public void startStream() {
-        running.set(true);
+    this.klass = klass;
+  }
+
+  /**
+   * FacebookFriendUpdatesProvider constructor.
+   * uses supplied FacebookUserstreamConfiguration.
+   * uses supplied output Class.
+   */
+  public FacebookFriendUpdatesProvider(FacebookUserstreamConfiguration config, Class klass) {
+    this.configuration = config;
+    this.klass = klass;
+  }
+
+  public Queue<StreamsDatum> getProviderQueue() {
+    return this.providerQueue;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void startStream() {
+    running.set(true);
+  }
+
+  @Override
+  public StreamsResultSet readCurrent() {
+
+    Preconditions.checkArgument(idsBatches.hasNext());
+
+    LOGGER.info("readCurrent");
+
+    // return stuff
+
+    LOGGER.info("Finished.  Cleaning up...");
+
+    LOGGER.info("Providing {} docs", providerQueue.size());
+
+    StreamsResultSet result =  new StreamsResultSet(providerQueue);
+    running.set(false);
+
+    LOGGER.info("Exiting");
+
+    return result;
+
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    LOGGER.debug("{} readNew", STREAMS_ID);
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    LOGGER.debug("{} readRange", STREAMS_ID);
+    this.start = start;
+    this.end = end;
+    readCurrent();
+    StreamsResultSet result = (StreamsResultSet)providerQueue.iterator();
+    return result;
+  }
+
+  @Override
+  public boolean isRunning() {
+    return running.get();
+  }
+
+  void shutdownAndAwaitTermination(ExecutorService pool) {
+    pool.shutdown(); // Disable new tasks from being submitted
+    try {
+      // Wait a while for existing tasks to terminate
+      if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+        pool.shutdownNow(); // Cancel currently executing tasks
+        // Wait a while for tasks to respond to being cancelled
+        if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+          System.err.println("Pool did not terminate");
+        }
+      }
+    } catch (InterruptedException ie) {
+      // (Re-)Cancel if current thread also interrupted
+      pool.shutdownNow();
+      // Preserve interrupt status
+      Thread.currentThread().interrupt();
     }
+  }
 
-    public StreamsResultSet readCurrent() {
-
-        Preconditions.checkArgument(idsBatches.hasNext());
-
-        LOGGER.info("readCurrent");
-
-        // return stuff
+  @Override
+  public void prepare(Object configurationObject) {
 
-        LOGGER.info("Finished.  Cleaning up...");
+    executor = MoreExecutors.listeningDecorator(newFixedThreadPoolWithQueueSize(5, 20));
 
-        LOGGER.info("Providing {} docs", providerQueue.size());
+    Preconditions.checkNotNull(providerQueue);
+    Preconditions.checkNotNull(this.klass);
+    Preconditions.checkNotNull(configuration.getOauth().getAppId());
+    Preconditions.checkNotNull(configuration.getOauth().getAppSecret());
+    Preconditions.checkNotNull(configuration.getOauth().getUserAccessToken());
 
-        StreamsResultSet result =  new StreamsResultSet(providerQueue);
-        running.set(false);
+    Facebook client = getFacebookClient();
 
-        LOGGER.info("Exiting");
-
-        return result;
+    try {
+      ResponseList<Friend> friendResponseList = client.friends().getFriends();
+      Paging<Friend> friendPaging;
+      do {
 
+        for ( Friend friend : friendResponseList ) {
+          // client.rawAPI().callPostAPI();
+          // add a subscription
+        }
+        friendPaging = friendResponseList.getPaging();
+        friendResponseList = client.fetchNext(friendPaging);
+      }
+      while ( friendPaging != null
+              &&
+              friendResponseList != null );
+    } catch (FacebookException ex) {
+      ex.printStackTrace();
     }
 
-    public StreamsResultSet readNew(BigInteger sequence) {
-        LOGGER.debug("{} readNew", STREAMS_ID);
-        throw new NotImplementedException();
-    }
+  }
 
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        LOGGER.debug("{} readRange", STREAMS_ID);
-        this.start = start;
-        this.end = end;
-        readCurrent();
-        StreamsResultSet result = (StreamsResultSet)providerQueue.iterator();
-        return result;
-    }
+  protected Facebook getFacebookClient() {
 
-    @Override
-    public boolean isRunning() {
-        return running.get();
-    }
+    ConfigurationBuilder cb = new ConfigurationBuilder();
+    cb.setDebugEnabled(true)
+        .setOAuthAppId(configuration.getOauth().getAppId())
+        .setOAuthAppSecret(configuration.getOauth().getAppSecret())
+        .setOAuthAccessToken(configuration.getOauth().getUserAccessToken())
+        .setOAuthPermissions(ALL_PERMISSIONS)
+        .setJSONStoreEnabled(true)
+        .setClientVersion("v1.0");
 
-    void shutdownAndAwaitTermination(ExecutorService pool) {
-        pool.shutdown(); // Disable new tasks from being submitted
-        try {
-            // Wait a while for existing tasks to terminate
-            if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
-                pool.shutdownNow(); // Cancel currently executing tasks
-                // Wait a while for tasks to respond to being cancelled
-                if (!pool.awaitTermination(10, TimeUnit.SECONDS))
-                    System.err.println("Pool did not terminate");
-            }
-        } catch (InterruptedException ie) {
-            // (Re-)Cancel if current thread also interrupted
-            pool.shutdownNow();
-            // Preserve interrupt status
-            Thread.currentThread().interrupt();
-        }
-    }
-
-    @Override
-    public void prepare(Object o) {
+    FacebookFactory ff = new FacebookFactory(cb.build());
+    Facebook facebook = ff.getInstance();
 
-        executor = MoreExecutors.listeningDecorator(newFixedThreadPoolWithQueueSize(5, 20));
+    return facebook;
+  }
 
-        Preconditions.checkNotNull(providerQueue);
-        Preconditions.checkNotNull(this.klass);
-        Preconditions.checkNotNull(configuration.getOauth().getAppId());
-        Preconditions.checkNotNull(configuration.getOauth().getAppSecret());
-        Preconditions.checkNotNull(configuration.getOauth().getUserAccessToken());
+  @Override
+  public void cleanUp() {
+    shutdownAndAwaitTermination(executor);
+  }
 
-        Facebook client = getFacebookClient();
+  private class FacebookFeedPollingTask implements Runnable {
 
-        try {
-            ResponseList<Friend> friendResponseList = client.friends().getFriends();
-            Paging<Friend> friendPaging;
-            do {
-
-                for( Friend friend : friendResponseList ) {
-
-                    //client.rawAPI().callPostAPI();
-                    // add a subscription
-                }
-                friendPaging = friendResponseList.getPaging();
-                friendResponseList = client.fetchNext(friendPaging);
-            } while( friendPaging != null &&
-                    friendResponseList != null );
-        } catch (FacebookException e) {
-            e.printStackTrace();
-        }
+    FacebookUserstreamProvider provider;
+    Facebook client;
 
-    }
+    private Set<Post> priorPollResult = Sets.newHashSet();
 
-    protected Facebook getFacebookClient()
-    {
-        ConfigurationBuilder cb = new ConfigurationBuilder();
-        cb.setDebugEnabled(true)
-            .setOAuthAppId(configuration.getOauth().getAppId())
-            .setOAuthAppSecret(configuration.getOauth().getAppSecret())
-            .setOAuthAccessToken(configuration.getOauth().getUserAccessToken())
-            .setOAuthPermissions(ALL_PERMISSIONS)
-            .setJSONStoreEnabled(true)
-            .setClientVersion("v1.0");
-
-        FacebookFactory ff = new FacebookFactory(cb.build());
-        Facebook facebook = ff.getInstance();
-
-        return facebook;
+    public FacebookFeedPollingTask(FacebookUserstreamProvider facebookUserstreamProvider) {
+      provider = facebookUserstreamProvider;
     }
 
     @Override
-    public void cleanUp() {
-        shutdownAndAwaitTermination(executor);
-    }
-
-    private class FacebookFeedPollingTask implements Runnable {
-
-        FacebookUserstreamProvider provider;
-        Facebook client;
-
-        private Set<Post> priorPollResult = Sets.newHashSet();
-
-        public FacebookFeedPollingTask(FacebookUserstreamProvider facebookUserstreamProvider) {
-            provider = facebookUserstreamProvider;
-        }
-
-        @Override
-        public void run() {
-            client = provider.getFacebookClient();
-            while (provider.isRunning()) {
-                try {
-                    ResponseList<Post> postResponseList = client.getHome();
-                    Set<Post> update = Sets.newHashSet(postResponseList);
-                    Set<Post> repeats = Sets.intersection(priorPollResult, Sets.newHashSet(update));
-                    Set<Post> entrySet = Sets.difference(update, repeats);
-                    for (Post item : entrySet) {
-                        String json = DataObjectFactory.getRawJSON(item);
-                        org.apache.streams.facebook.Post post = mapper.readValue(json, org.apache.streams.facebook.Post.class);
-                        try {
-                            lock.readLock().lock();
-                            ComponentUtils.offerUntilSuccess(new StreamsDatum(post), providerQueue);
-                            countersCurrent.incrementAttempt();
-                        } finally {
-                            lock.readLock().unlock();
-                        }
-                    }
-                    priorPollResult = update;
-                    Thread.sleep(configuration.getPollIntervalMillis());
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
+    public void run() {
+      client = provider.getFacebookClient();
+      while (provider.isRunning()) {
+        try {
+          ResponseList<Post> postResponseList = client.getHome();
+          Set<Post> update = Sets.newHashSet(postResponseList);
+          Set<Post> repeats = Sets.intersection(priorPollResult, Sets.newHashSet(update));
+          Set<Post> entrySet = Sets.difference(update, repeats);
+          for (Post item : entrySet) {
+            String json = DataObjectFactory.getRawJSON(item);
+            org.apache.streams.facebook.Post post = mapper.readValue(json, org.apache.streams.facebook.Post.class);
+            try {
+              lock.readLock().lock();
+              ComponentUtils.offerUntilSuccess(new StreamsDatum(post), providerQueue);
+              countersCurrent.incrementAttempt();
+            } finally {
+              lock.readLock().unlock();
             }
+          }
+          priorPollResult = update;
+          Thread.sleep(configuration.getPollIntervalMillis());
+        } catch (Exception ex) {
+          ex.printStackTrace();
         }
+      }
     }
+  }
 }



[17/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeActivityUtil.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeActivityUtil.java b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeActivityUtil.java
index ab2f55c..4754353 100644
--- a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeActivityUtil.java
+++ b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeActivityUtil.java
@@ -19,6 +19,13 @@
 
 package com.youtube.serializer;
 
+import org.apache.streams.exceptions.ActivitySerializerException;
+import org.apache.streams.pojo.extensions.ExtensionUtil;
+import org.apache.streams.pojo.json.Activity;
+import org.apache.streams.pojo.json.ActivityObject;
+import org.apache.streams.pojo.json.Image;
+import org.apache.streams.pojo.json.Provider;
+
 import com.google.api.client.util.Maps;
 import com.google.api.services.youtube.model.Channel;
 import com.google.api.services.youtube.model.Thumbnail;
@@ -27,12 +34,6 @@ import com.google.api.services.youtube.model.Video;
 import com.google.common.base.Joiner;
 import com.google.common.base.Optional;
 import com.google.common.collect.Lists;
-import org.apache.streams.exceptions.ActivitySerializerException;
-import org.apache.streams.pojo.extensions.ExtensionUtil;
-import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.pojo.json.ActivityObject;
-import org.apache.streams.pojo.json.Image;
-import org.apache.streams.pojo.json.Provider;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -41,153 +42,160 @@ import java.util.HashMap;
 import java.util.Map;
 
 public class YoutubeActivityUtil {
-    private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeActivityUtil.class);
-
-    /**
-     * Given a {@link com.google.api.services.youtube.YouTube.Videos} object and an
-     * {@link org.apache.streams.pojo.json.Activity} object, fill out the appropriate details
-     *
-     * @param video
-     * @param activity
-     * @throws org.apache.streams.exceptions.ActivitySerializerException
-     */
-    public static void updateActivity(Video video, Activity activity, String channelId) throws ActivitySerializerException {
-        activity.setActor(buildActor(video, video.getSnippet().getChannelId()));
-        activity.setVerb("post");
-
-        activity.setId(formatId(activity.getVerb(),
-                Optional.fromNullable(
-                        video.getId())
-                        .orNull()));
-
-        activity.setPublished(new DateTime(video.getSnippet().getPublishedAt().getValue()));
-        activity.setTitle(video.getSnippet().getTitle());
-        activity.setContent(video.getSnippet().getDescription());
-        activity.setUrl("https://www.youtube.com/watch?v=" + video.getId());
-
-        activity.setProvider(getProvider());
-
-        activity.setObject(buildActivityObject(video));
-
-        addYoutubeExtensions(activity, video);
-    }
-
 
-    /**
-     * Given a {@link com.google.api.services.youtube.model.Channel} object and an
-     * {@link org.apache.streams.pojo.json.Activity} object, fill out the appropriate details
-     *
-     * @param channel
-     * @param activity
-     * @throws org.apache.streams.exceptions.ActivitySerializerException
-     */
-    public static void updateActivity(Channel channel, Activity activity, String channelId) throws ActivitySerializerException {
-        try {
-            activity.setProvider(getProvider());
-            activity.setVerb("post");
-            activity.setActor(createActorForChannel(channel));
-            Map<String, Object> extensions = Maps.newHashMap();
-            extensions.put("youtube", channel);
-            activity.setAdditionalProperty("extensions", extensions);
-        } catch (Throwable t) {
-            throw new ActivitySerializerException(t);
-        }
+  private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeActivityUtil.class);
+
+  /**
+   * Given a {@link com.google.api.services.youtube.YouTube.Videos} object and an
+   * {@link org.apache.streams.pojo.json.Activity} object, fill out the appropriate details
+   *
+   * @param video Video
+   * @param activity Activity
+   * @throws ActivitySerializerException ActivitySerializerException
+   */
+  public static void updateActivity(Video video, Activity activity, String channelId) throws ActivitySerializerException {
+    activity.setActor(buildActor(video, video.getSnippet().getChannelId()));
+    activity.setVerb("post");
+
+    activity.setId(formatId(activity.getVerb(),
+        Optional.fromNullable(
+            video.getId())
+            .orNull()));
+
+    activity.setPublished(new DateTime(video.getSnippet().getPublishedAt().getValue()));
+    activity.setTitle(video.getSnippet().getTitle());
+    activity.setContent(video.getSnippet().getDescription());
+    activity.setUrl("https://www.youtube.com/watch?v=" + video.getId());
+
+    activity.setProvider(getProvider());
+
+    activity.setObject(buildActivityObject(video));
+
+    addYoutubeExtensions(activity, video);
+  }
+
+
+  /**
+   * Given a {@link com.google.api.services.youtube.model.Channel} object and an
+   * {@link org.apache.streams.pojo.json.Activity} object, fill out the appropriate details
+   *
+   * @param channel Channel
+   * @param activity Activity
+   * @throws ActivitySerializerException ActivitySerializerException
+   */
+  public static void updateActivity(Channel channel, Activity activity, String channelId) throws ActivitySerializerException {
+    try {
+      activity.setProvider(getProvider());
+      activity.setVerb("post");
+      activity.setActor(createActorForChannel(channel));
+      Map<String, Object> extensions = Maps.newHashMap();
+      extensions.put("youtube", channel);
+      activity.setAdditionalProperty("extensions", extensions);
+    } catch (Throwable throwable) {
+      throw new ActivitySerializerException(throwable);
     }
-
-    public static ActivityObject createActorForChannel(Channel channel) {
-        ActivityObject actor = new ActivityObject();
-        actor.setId("id:youtube:"+channel.getId());
-        actor.setSummary(channel.getSnippet().getDescription());
-        actor.setDisplayName(channel.getSnippet().getTitle());
-        Image image = new Image();
-        image.setUrl(channel.getSnippet().getThumbnails().getHigh().getUrl());
-        actor.setImage(image);
-        actor.setUrl("https://youtube.com/user/" + channel.getId());
-        Map<String, Object> actorExtensions = Maps.newHashMap();
-        actorExtensions.put("followers", channel.getStatistics().getSubscriberCount());
-        actorExtensions.put("posts", channel.getStatistics().getVideoCount());
-        actor.setAdditionalProperty("extensions", actorExtensions);
-        return actor;
+  }
+
+  /**
+   * createActorForChannel.
+   * @param channel Channel
+   * @return $.actor
+   */
+  public static ActivityObject createActorForChannel(Channel channel) {
+    ActivityObject actor = new ActivityObject();
+    // TODO: use generic provider id concatenator
+    actor.setId("id:youtube:" + channel.getId());
+    actor.setSummary(channel.getSnippet().getDescription());
+    actor.setDisplayName(channel.getSnippet().getTitle());
+    Image image = new Image();
+    image.setUrl(channel.getSnippet().getThumbnails().getHigh().getUrl());
+    actor.setImage(image);
+    actor.setUrl("https://youtube.com/user/" + channel.getId());
+    Map<String, Object> actorExtensions = Maps.newHashMap();
+    actorExtensions.put("followers", channel.getStatistics().getSubscriberCount());
+    actorExtensions.put("posts", channel.getStatistics().getVideoCount());
+    actor.setAdditionalProperty("extensions", actorExtensions);
+    return actor;
+  }
+
+  /**
+   * Given a video object, create the appropriate activity object with a valid image
+   * (thumbnail) and video URL.
+   * @param video Video
+   * @return Activity Object with Video URL and a thumbnail image
+   */
+  private static ActivityObject buildActivityObject(Video video) {
+    ActivityObject activityObject = new ActivityObject();
+
+    ThumbnailDetails thumbnailDetails = video.getSnippet().getThumbnails();
+    Thumbnail thumbnail = thumbnailDetails.getDefault();
+
+    if (thumbnail != null) {
+      Image image = new Image();
+      image.setUrl(thumbnail.getUrl());
+      image.setHeight(thumbnail.getHeight());
+      image.setWidth(thumbnail.getWidth());
+
+      activityObject.setImage(image);
     }
 
-    /**
-     * Given a video object, create the appropriate activity object with a valid image
-     * (thumbnail) and video URL
-     * @param video
-     * @return Activity Object with Video URL and a thumbnail image
-     */
-    private static ActivityObject buildActivityObject(Video video) {
-        ActivityObject activityObject = new ActivityObject();
-
-        ThumbnailDetails thumbnailDetails = video.getSnippet().getThumbnails();
-        Thumbnail thumbnail = thumbnailDetails.getDefault();
-
-        if(thumbnail != null) {
-            Image image = new Image();
-            image.setUrl(thumbnail.getUrl());
-            image.setHeight(thumbnail.getHeight());
-            image.setWidth(thumbnail.getWidth());
-
-            activityObject.setImage(image);
-        }
+    activityObject.setUrl("https://www.youtube.com/watch?v=" + video.getId());
+    activityObject.setObjectType("video");
 
-        activityObject.setUrl("https://www.youtube.com/watch?v=" + video.getId());
-        activityObject.setObjectType("video");
+    return activityObject;
+  }
 
-        return activityObject;
-    }
-
-    /**
-     * Add the Youtube extensions to the Activity object that we're building
-     * @param activity
-     * @param video
-     */
-    private static void addYoutubeExtensions(Activity activity, Video video) {
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
-
-        extensions.put("youtube", video);
-
-        if(video.getStatistics() != null) {
-            Map<String, Object> likes = new HashMap<>();
-            likes.put("count", video.getStatistics().getCommentCount());
-            extensions.put("likes", likes);
-        }
-    }
+  /**
+   * Add the Youtube extensions to the Activity object that we're building.
+   * @param activity Activity
+   * @param video Video
+   */
+  private static void addYoutubeExtensions(Activity activity, Video video) {
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
 
-    /**
-     * Build an {@link org.apache.streams.pojo.json.ActivityObject} actor given the video object
-     * @param video
-     * @param id
-     * @return Actor object
-     */
-    private static ActivityObject buildActor(Video video, String id) {
-        ActivityObject actor = new ActivityObject();
-
-        actor.setId("id:youtube:" + id);
-        actor.setDisplayName(video.getSnippet().getChannelTitle());
-        actor.setSummary(video.getSnippet().getDescription());
-        actor.setAdditionalProperty("handle", video.getSnippet().getChannelTitle());
-
-        return actor;
-    }
-
-    /**
-     * Gets the common youtube {@link org.apache.streams.pojo.json.Provider} object
-     * @return a provider object representing YouTube
-     */
-    public static Provider getProvider() {
-        Provider provider = new Provider();
-        provider.setId("id:providers:youtube");
-        provider.setDisplayName("YouTube");
-        return provider;
-    }
+    extensions.put("youtube", video);
 
-    /**
-     * Formats the ID to conform with the Apache Streams activity ID convention
-     * @param idparts the parts of the ID to join
-     * @return a valid Activity ID in format "id:youtube:part1:part2:...partN"
-     */
-    public static String formatId(String... idparts) {
-        return Joiner.on(":").join(Lists.asList("id:youtube", idparts));
+    if (video.getStatistics() != null) {
+      Map<String, Object> likes = new HashMap<>();
+      likes.put("count", video.getStatistics().getCommentCount());
+      extensions.put("likes", likes);
     }
+  }
+
+  /**
+   * Build an {@link org.apache.streams.pojo.json.ActivityObject} actor given the video object
+   * @param video Video
+   * @param id id
+   * @return Actor object
+   */
+  private static ActivityObject buildActor(Video video, String id) {
+    ActivityObject actor = new ActivityObject();
+
+    actor.setId("id:youtube:" + id);
+    actor.setDisplayName(video.getSnippet().getChannelTitle());
+    actor.setSummary(video.getSnippet().getDescription());
+    actor.setAdditionalProperty("handle", video.getSnippet().getChannelTitle());
+
+    return actor;
+  }
+
+  /**
+   * Gets the common youtube {@link org.apache.streams.pojo.json.Provider} object
+   * @return a provider object representing YouTube
+   */
+  public static Provider getProvider() {
+    Provider provider = new Provider();
+    provider.setId("id:providers:youtube");
+    provider.setDisplayName("YouTube");
+    return provider;
+  }
+
+  /**
+   * Formats the ID to conform with the Apache Streams activity ID convention
+   * @param idparts the parts of the ID to join
+   * @return a valid Activity ID in format "id:youtube:part1:part2:...partN"
+   */
+  public static String formatId(String... idparts) {
+    return Joiner.on(":").join(Lists.asList("id:youtube", idparts));
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeChannelDeserializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeChannelDeserializer.java b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeChannelDeserializer.java
index 019c7c5..ea9a49d 100644
--- a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeChannelDeserializer.java
+++ b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeChannelDeserializer.java
@@ -40,111 +40,116 @@ import java.util.Iterator;
 import java.util.List;
 
 /**
- *
+ * YoutubeChannelDeserializer is a JsonDeserializer for Channel.
  */
 public class YoutubeChannelDeserializer extends JsonDeserializer<Channel> {
 
-
-    @Override
-    public Channel deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
-        JsonNode node = jp.getCodec().readTree(jp);
-        try {
-            Channel channel = new Channel();
-            if(node.findPath("etag") != null)
-                channel.setEtag(node.get("etag").asText());
-            if(node.findPath("kind") != null)
-                channel.setKind(node.get("kind").asText());
-            channel.setId(node.get("id").asText());
-            channel.setTopicDetails(setTopicDetails(node.findValue("topicDetails")));
-            channel.setStatistics(setChannelStatistics(node.findValue("statistics")));
-            channel.setContentDetails(setContentDetails(node.findValue("contentDetails")));
-            channel.setSnippet(setChannelSnippet(node.findValue("snippet")));
-            return channel;
-        } catch (Throwable t) {
-            throw new IOException(t);
-        }
+  @Override
+  public Channel deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
+    JsonNode node = jp.getCodec().readTree(jp);
+    try {
+      Channel channel = new Channel();
+      if (node.findPath("etag") != null) {
+        channel.setEtag(node.get("etag").asText());
+      }
+      if (node.findPath("kind") != null) {
+        channel.setKind(node.get("kind").asText());
+      }
+      channel.setId(node.get("id").asText());
+      channel.setTopicDetails(setTopicDetails(node.findValue("topicDetails")));
+      channel.setStatistics(setChannelStatistics(node.findValue("statistics")));
+      channel.setContentDetails(setContentDetails(node.findValue("contentDetails")));
+      channel.setSnippet(setChannelSnippet(node.findValue("snippet")));
+      return channel;
+    } catch (Throwable throwable) {
+      throw new IOException(throwable);
     }
+  }
 
-    protected ChannelSnippet setChannelSnippet(JsonNode node) {
-        ChannelSnippet snippet = new ChannelSnippet();
-        snippet.setTitle(node.get("title").asText());
-        snippet.setDescription(node.get("description").asText());
-        snippet.setPublishedAt(new DateTime(node.get("publishedAt").get("value").longValue()));
-        snippet.setLocalized(setLocalized(node.findValue("localized")));
-        snippet.setThumbnails(setThumbnails(node.findValue("thumbnails")));
-        return snippet;
-    }
+  protected ChannelSnippet setChannelSnippet(JsonNode node) {
+    ChannelSnippet snippet = new ChannelSnippet();
+    snippet.setTitle(node.get("title").asText());
+    snippet.setDescription(node.get("description").asText());
+    snippet.setPublishedAt(new DateTime(node.get("publishedAt").get("value").longValue()));
+    snippet.setLocalized(setLocalized(node.findValue("localized")));
+    snippet.setThumbnails(setThumbnails(node.findValue("thumbnails")));
+    return snippet;
+  }
 
-    protected ThumbnailDetails setThumbnails(JsonNode node) {
-        ThumbnailDetails details = new ThumbnailDetails();
-        if(node == null) {
-            return details;
-        }
-        details.setDefault(new Thumbnail().setUrl(node.get("default").get("url").asText()));
-        details.setHigh(new Thumbnail().setUrl(node.get("high").get("url").asText()));
-        details.setMedium(new Thumbnail().setUrl(node.get("medium").get("url").asText()));
-        return details;
+  protected ThumbnailDetails setThumbnails(JsonNode node) {
+    ThumbnailDetails details = new ThumbnailDetails();
+    if (node == null) {
+      return details;
     }
+    details.setDefault(new Thumbnail().setUrl(node.get("default").get("url").asText()));
+    details.setHigh(new Thumbnail().setUrl(node.get("high").get("url").asText()));
+    details.setMedium(new Thumbnail().setUrl(node.get("medium").get("url").asText()));
+    return details;
+  }
 
-    protected ChannelLocalization setLocalized(JsonNode node) {
-        if(node == null) {
-            return new ChannelLocalization();
-        }
-        ChannelLocalization localization = new ChannelLocalization();
-        localization.setDescription(node.get("description").asText());
-        localization.setTitle(node.get("title").asText());
-        return localization;
+  protected ChannelLocalization setLocalized(JsonNode node) {
+    if (node == null) {
+      return new ChannelLocalization();
     }
+    ChannelLocalization localization = new ChannelLocalization();
+    localization.setDescription(node.get("description").asText());
+    localization.setTitle(node.get("title").asText());
+    return localization;
+  }
 
-    protected ChannelContentDetails setContentDetails(JsonNode node) {
-        ChannelContentDetails contentDetails = new ChannelContentDetails();
-        if(node == null) {
-            return contentDetails;
-        }
-        if(node.findValue("googlePlusUserId") != null)
-            contentDetails.setGooglePlusUserId(node.get("googlePlusUserId").asText());
-        contentDetails.setRelatedPlaylists(setRelatedPlaylists(node.findValue("relatedPlaylists")));
-        return contentDetails;
+  protected ChannelContentDetails setContentDetails(JsonNode node) {
+    ChannelContentDetails contentDetails = new ChannelContentDetails();
+    if (node == null) {
+      return contentDetails;
+    }
+    if (node.findValue("googlePlusUserId") != null) {
+      contentDetails.setGooglePlusUserId(node.get("googlePlusUserId").asText());
     }
+    contentDetails.setRelatedPlaylists(setRelatedPlaylists(node.findValue("relatedPlaylists")));
+    return contentDetails;
+  }
 
-    protected ChannelContentDetails.RelatedPlaylists setRelatedPlaylists(JsonNode node) {
-        ChannelContentDetails.RelatedPlaylists playlists = new ChannelContentDetails.RelatedPlaylists();
-        if(node == null) {
-            return playlists;
-        }
-        if(node.findValue("favorites") != null)
-            playlists.setFavorites(node.get("favorites").asText());
-        if(node.findValue("likes") != null)
-            playlists.setLikes(node.get("likes").asText());
-        if(node.findValue("uploads") != null)
-            playlists.setUploads(node.get("uploads").asText());
-        return playlists;
+  protected ChannelContentDetails.RelatedPlaylists setRelatedPlaylists(JsonNode node) {
+    ChannelContentDetails.RelatedPlaylists playlists = new ChannelContentDetails.RelatedPlaylists();
+    if (node == null) {
+      return playlists;
     }
+    if (node.findValue("favorites") != null) {
+      playlists.setFavorites(node.get("favorites").asText());
+    }
+    if (node.findValue("likes") != null) {
+      playlists.setLikes(node.get("likes").asText());
+    }
+    if (node.findValue("uploads") != null) {
+      playlists.setUploads(node.get("uploads").asText());
+    }
+    return playlists;
+  }
 
-    protected ChannelStatistics setChannelStatistics(JsonNode node) {
-        ChannelStatistics stats = new ChannelStatistics();
-        if(node == null) {
-            return stats;
-        }
-        stats.setCommentCount(node.get("commentCount").bigIntegerValue());
-        stats.setHiddenSubscriberCount(node.get("hiddenSubscriberCount").asBoolean());
-        stats.setSubscriberCount(node.get("subscriberCount").bigIntegerValue());
-        stats.setVideoCount(node.get("videoCount").bigIntegerValue());
-        stats.setViewCount(node.get("viewCount").bigIntegerValue());
-        return stats;
+  protected ChannelStatistics setChannelStatistics(JsonNode node) {
+    ChannelStatistics stats = new ChannelStatistics();
+    if (node == null) {
+      return stats;
     }
+    stats.setCommentCount(node.get("commentCount").bigIntegerValue());
+    stats.setHiddenSubscriberCount(node.get("hiddenSubscriberCount").asBoolean());
+    stats.setSubscriberCount(node.get("subscriberCount").bigIntegerValue());
+    stats.setVideoCount(node.get("videoCount").bigIntegerValue());
+    stats.setViewCount(node.get("viewCount").bigIntegerValue());
+    return stats;
+  }
 
-    protected ChannelTopicDetails setTopicDetails(JsonNode node) {
-        ChannelTopicDetails details = new ChannelTopicDetails();
-        if(node == null) {
-            return details;
-        }
-        List<String> topicIds = Lists.newLinkedList();
-        Iterator<JsonNode> it = node.get("topicIds").iterator();
-        while(it.hasNext()) {
-            topicIds.add(it.next().asText());
-        }
-        details.setTopicIds(topicIds);
-        return  details;
+  protected ChannelTopicDetails setTopicDetails(JsonNode node) {
+    ChannelTopicDetails details = new ChannelTopicDetails();
+    if (node == null) {
+      return details;
+    }
+    List<String> topicIds = Lists.newLinkedList();
+    Iterator<JsonNode> it = node.get("topicIds").iterator();
+    while (it.hasNext()) {
+      topicIds.add(it.next().asText());
     }
+    details.setTopicIds(topicIds);
+    return  details;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeEventClassifier.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeEventClassifier.java b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeEventClassifier.java
index de7bc3a..65a454c 100644
--- a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeEventClassifier.java
+++ b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeEventClassifier.java
@@ -18,38 +18,44 @@
 
 package com.youtube.serializer;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.api.services.youtube.model.Video;
 import com.google.common.base.Preconditions;
 import org.apache.commons.lang.StringUtils;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 
 import java.io.IOException;
 
 public class YoutubeEventClassifier {
-    private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-    private static final String VIDEO_IDENTIFIER = "\"youtube#video\"";
-    private static final String CHANNEL_IDENTIFIER = "youtube#channel";
-
-    public static Class detectClass(String json) {
-        Preconditions.checkNotNull(json);
-        Preconditions.checkArgument(StringUtils.isNotEmpty(json));
-
-        ObjectNode objectNode;
-        try {
-            objectNode = (ObjectNode) mapper.readTree(json);
-        } catch (IOException e) {
-            e.printStackTrace();
-            return null;
-        }
-
-        if (objectNode.findValue("kind") != null && objectNode.get("kind").toString().equals(VIDEO_IDENTIFIER)) {
-            return Video.class;
-        }  else if (objectNode.findValue("kind") != null && objectNode.get("kind").toString().contains(CHANNEL_IDENTIFIER)){
-            return com.google.api.services.youtube.model.Channel.class;
-        }  else {
-            return ObjectNode.class;
-        }
+  private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static final String VIDEO_IDENTIFIER = "\"youtube#video\"";
+  private static final String CHANNEL_IDENTIFIER = "youtube#channel";
+
+  /**
+   * detect probable Class of a json String from YouTube.
+   * @param json json
+   * @return Class
+   */
+  public static Class detectClass(String json) {
+    Preconditions.checkNotNull(json);
+    Preconditions.checkArgument(StringUtils.isNotEmpty(json));
+
+    ObjectNode objectNode;
+    try {
+      objectNode = (ObjectNode) mapper.readTree(json);
+    } catch (IOException ex) {
+      ex.printStackTrace();
+      return null;
+    }
+
+    if (objectNode.findValue("kind") != null && objectNode.get("kind").toString().equals(VIDEO_IDENTIFIER)) {
+      return Video.class;
+    } else if (objectNode.findValue("kind") != null && objectNode.get("kind").toString().contains(CHANNEL_IDENTIFIER)) {
+      return com.google.api.services.youtube.model.Channel.class;
+    } else {
+      return ObjectNode.class;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeVideoDeserializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeVideoDeserializer.java b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeVideoDeserializer.java
index dbe3303..43fe8c6 100644
--- a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeVideoDeserializer.java
+++ b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/serializer/YoutubeVideoDeserializer.java
@@ -24,89 +24,95 @@ import com.fasterxml.jackson.databind.DeserializationContext;
 import com.fasterxml.jackson.databind.JsonDeserializer;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.google.api.client.util.DateTime;
-import com.google.api.services.youtube.model.*;
+import com.google.api.services.youtube.model.Thumbnail;
+import com.google.api.services.youtube.model.ThumbnailDetails;
+import com.google.api.services.youtube.model.Video;
+import com.google.api.services.youtube.model.VideoSnippet;
+import com.google.api.services.youtube.model.VideoStatistics;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
-public class YoutubeVideoDeserializer extends JsonDeserializer<Video> {
-    private final static Logger LOGGER = LoggerFactory.getLogger(YoutubeVideoDeserializer.class);
-
-    /**
-     * Because the Youtube Video object contains complex objects within its hierarchy, we have to use
-     * a custom deserializer
-     *
-     * @param jsonParser
-     * @param deserializationContext
-     * @return The deserialized {@link com.google.api.services.youtube.YouTube.Videos} object
-     * @throws java.io.IOException
-     * @throws com.fasterxml.jackson.core.JsonProcessingException
-     */
-    @Override
-    public Video deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
-        JsonNode node = jsonParser.getCodec().readTree(jsonParser);
-        Video video = new Video();
-
-        try {
-            video.setId(node.get("id").asText());
-            video.setEtag(node.get("etag").asText());
-            video.setKind(node.get("kind").asText());
-
-            video.setSnippet(buildSnippet(node));
-            video.setStatistics(buildStatistics(node));
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to deserialize a Video object: {}", e);
-        }
-
-        return video;
-    }
 
-    /**
-     * Given the raw JsonNode, construct a video snippet object
-     * @param node
-     * @return VideoSnippet
-     */
-    private VideoSnippet buildSnippet(JsonNode node) {
-        VideoSnippet snippet = new VideoSnippet();
-        JsonNode snippetNode = node.get("snippet");
+public class YoutubeVideoDeserializer extends JsonDeserializer<Video> {
 
-        snippet.setChannelId(snippetNode.get("channelId").asText());
-        snippet.setChannelTitle(snippetNode.get("channelTitle").asText());
-        snippet.setDescription(snippetNode.get("description").asText());
-        snippet.setTitle(snippetNode.get("title").asText());
-        snippet.setPublishedAt(new DateTime(snippetNode.get("publishedAt").get("value").asLong()));
+  private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeVideoDeserializer.class);
+
+  /**
+   * Because the Youtube Video object contains complex objects within its hierarchy, we have to use
+   * a custom deserializer.
+   *
+   * @param jsonParser jsonParser
+   * @param deserializationContext deserializationContext
+   * @return The deserialized {@link com.google.api.services.youtube.YouTube.Videos} object
+   * @throws java.io.IOException IOException
+   * @throws com.fasterxml.jackson.core.JsonProcessingException JsonProcessingException
+   */
+  @Override
+  public Video deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException {
+    JsonNode node = jsonParser.getCodec().readTree(jsonParser);
+    Video video = new Video();
+
+    try {
+      video.setId(node.get("id").asText());
+      video.setEtag(node.get("etag").asText());
+      video.setKind(node.get("kind").asText());
+
+      video.setSnippet(buildSnippet(node));
+      video.setStatistics(buildStatistics(node));
+    } catch (Exception ex) {
+      LOGGER.error("Exception while trying to deserialize a Video object: {}", ex);
+    }
 
-        ThumbnailDetails thumbnailDetails = new ThumbnailDetails();
-        for(JsonNode t : snippetNode.get("thumbnails")) {
-            Thumbnail thumbnail = new Thumbnail();
+    return video;
+  }
+
+  /**
+   * Given the raw JsonNode, construct a video snippet object.
+   * @param node JsonNode
+   * @return VideoSnippet
+   */
+  private VideoSnippet buildSnippet(JsonNode node) {
+    VideoSnippet snippet = new VideoSnippet();
+    JsonNode snippetNode = node.get("snippet");
+
+    snippet.setChannelId(snippetNode.get("channelId").asText());
+    snippet.setChannelTitle(snippetNode.get("channelTitle").asText());
+    snippet.setDescription(snippetNode.get("description").asText());
+    snippet.setTitle(snippetNode.get("title").asText());
+    snippet.setPublishedAt(new DateTime(snippetNode.get("publishedAt").get("value").asLong()));
+
+    ThumbnailDetails thumbnailDetails = new ThumbnailDetails();
+    for (JsonNode t : snippetNode.get("thumbnails")) {
+      Thumbnail thumbnail = new Thumbnail();
+
+      thumbnail.setHeight(t.get("height").asLong());
+      thumbnail.setUrl(t.get("url").asText());
+      thumbnail.setWidth(t.get("width").asLong());
+
+      thumbnailDetails.setDefault(thumbnail);
+    }
 
-            thumbnail.setHeight(t.get("height").asLong());
-            thumbnail.setUrl(t.get("url").asText());
-            thumbnail.setWidth(t.get("width").asLong());
+    snippet.setThumbnails(thumbnailDetails);
 
-            thumbnailDetails.setDefault(thumbnail);
-        }
+    return snippet;
+  }
 
-        snippet.setThumbnails(thumbnailDetails);
+  /**
+   * Given the raw JsonNode, construct a statistics object.
+   * @param node JsonNode
+   * @return VideoStatistics
+   */
+  private VideoStatistics buildStatistics(JsonNode node) {
+    VideoStatistics statistics = new VideoStatistics();
+    JsonNode statisticsNode = node.get("statistics");
 
-        return snippet;
-    }
+    statistics.setCommentCount(statisticsNode.get("commentCount").bigIntegerValue());
+    statistics.setDislikeCount(statisticsNode.get("dislikeCount").bigIntegerValue());
+    statistics.setFavoriteCount(statisticsNode.get("favoriteCount").bigIntegerValue());
+    statistics.setLikeCount(statisticsNode.get("likeCount").bigIntegerValue());
+    statistics.setViewCount(statisticsNode.get("viewCount").bigIntegerValue());
 
-    /**
-     * Given the raw JsonNode, construct a statistics object
-     * @param node
-     * @return VideoStatistics
-     */
-    private VideoStatistics buildStatistics(JsonNode node) {
-        VideoStatistics statistics = new VideoStatistics();
-        JsonNode statisticsNode = node.get("statistics");
-
-        statistics.setCommentCount(statisticsNode.get("commentCount").bigIntegerValue());
-        statistics.setDislikeCount(statisticsNode.get("dislikeCount").bigIntegerValue());
-        statistics.setFavoriteCount(statisticsNode.get("favoriteCount").bigIntegerValue());
-        statistics.setLikeCount(statisticsNode.get("likeCount").bigIntegerValue());
-        statistics.setViewCount(statisticsNode.get("viewCount").bigIntegerValue());
-
-        return statistics;
-    }
+    return statistics;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/processor/YoutubeTypeConverterTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/processor/YoutubeTypeConverterTest.java b/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/processor/YoutubeTypeConverterTest.java
index 8b53776..469b8d0 100644
--- a/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/processor/YoutubeTypeConverterTest.java
+++ b/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/processor/YoutubeTypeConverterTest.java
@@ -15,17 +15,19 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package com.youtube.processor;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.pojo.json.Activity;
+
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import com.google.api.services.youtube.model.Video;
 import com.youtube.serializer.YoutubeActivityUtil;
 import com.youtube.serializer.YoutubeVideoDeserializer;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -36,67 +38,74 @@ import java.util.List;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
+/**
+ * Test for YoutubeTypeConverter.
+ */
 public class YoutubeTypeConverterTest {
-    private final static Logger LOGGER = LoggerFactory.getLogger(YoutubeTypeConverterTest.class);
-    private final String testVideo = "{\"etag\":\"\\\"4FSIjSQU83ZJMYAO0IqRYMvZX98/V0q3OIauZ3ZAkszLUDbHL45yEGM\\\"\",\"id\":\"sUOepRctwVE\",\"kind\":\"youtube#video\",\"snippet\":{\"channelId\":\"UCNENOn2nmwguQYkejKhJGPQ\",\"channelTitle\":\"Carilion Clinic\",\"description\":\"Join Carilion Clinic's Heart Failure experts for a LIVE Google+ Hangout on Feb. 23, 12:30-1 p.m. to learn more about heart failure, treatment options, and lifestyle changes. Learn more: https://plus.google.com/u/0/events/cj074q9r6csgv6i2kqhi2isc6k0\",\"publishedAt\":{\"value\":1422977409000,\"dateOnly\":false,\"timeZoneShift\":-360},\"thumbnails\":{\"default\":{\"height\":480,\"url\":\"https://i.ytimg.com/vi/sUOepRctwVE/sddefault.jpg\",\"width\":640}},\"title\":\"Be Heart Smart: Congestive Heart Failure LIVE Event\"},\"statistics\":{\"commentCount\":1,\"dislikeCount\":0,\"favoriteCount\":0,\"likeCount\":0,\"viewCount\":9}}";
-
-    private YoutubeTypeConverter youtubeTypeConverter;
-    private ObjectMapper objectMapper;
-
-    @Before
-    public void setup() {
-        objectMapper = StreamsJacksonMapper.getInstance();
-        SimpleModule simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(Video.class, new YoutubeVideoDeserializer());
-        objectMapper.registerModule(simpleModule);
-        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-
-        youtubeTypeConverter = new YoutubeTypeConverter();
-        youtubeTypeConverter.prepare(null);
-    }
-
-    @Test
-    public void testVideoConversion() {
-        try {
-            LOGGER.info("raw: {}", testVideo);
-            Activity activity = new Activity();
-
-            Video video = objectMapper.readValue(testVideo, Video.class);
-            StreamsDatum streamsDatum = new StreamsDatum(video);
-
-            assertNotNull(streamsDatum.getDocument());
-
-            List<StreamsDatum> retList = youtubeTypeConverter.process(streamsDatum);
-            YoutubeActivityUtil.updateActivity(video, activity, "testChannelId");
 
-            assertEquals(retList.size(), 1);
-            assert (retList.get(0).getDocument() instanceof Activity);
-            assertEquals(activity, retList.get(0).getDocument());
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to convert video to activity: {}", e);
-        }
+  private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeTypeConverterTest.class);
+  private final String testVideo = "{\"etag\":\"\\\"4FSIjSQU83ZJMYAO0IqRYMvZX98/V0q3OIauZ3ZAkszLUDbHL45yEGM\\\"\",\"id\":\"sUOepRctwVE\",\"kind\":\"youtube#video\",\"snippet\":{\"channelId\":\"UCNENOn2nmwguQYkejKhJGPQ\",\"channelTitle\":\"Carilion Clinic\",\"description\":\"Join Carilion Clinic's Heart Failure experts for a LIVE Google+ Hangout on Feb. 23, 12:30-1 p.m. to learn more about heart failure, treatment options, and lifestyle changes. Learn more: https://plus.google.com/u/0/events/cj074q9r6csgv6i2kqhi2isc6k0\",\"publishedAt\":{\"value\":1422977409000,\"dateOnly\":false,\"timeZoneShift\":-360},\"thumbnails\":{\"default\":{\"height\":480,\"url\":\"https://i.ytimg.com/vi/sUOepRctwVE/sddefault.jpg\",\"width\":640}},\"title\":\"Be Heart Smart: Congestive Heart Failure LIVE Event\"},\"statistics\":{\"commentCount\":1,\"dislikeCount\":0,\"favoriteCount\":0,\"likeCount\":0,\"viewCount\":9}}";
+
+  private YoutubeTypeConverter youtubeTypeConverter;
+  private ObjectMapper objectMapper;
+
+  /**
+   * setup for test.
+   */
+  @Before
+  public void setup() {
+    objectMapper = StreamsJacksonMapper.getInstance();
+    SimpleModule simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(Video.class, new YoutubeVideoDeserializer());
+    objectMapper.registerModule(simpleModule);
+    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+
+    youtubeTypeConverter = new YoutubeTypeConverter();
+    youtubeTypeConverter.prepare(null);
+  }
+
+  @Test
+  public void testVideoConversion() {
+    try {
+      LOGGER.info("raw: {}", testVideo);
+      Activity activity = new Activity();
+
+      Video video = objectMapper.readValue(testVideo, Video.class);
+      StreamsDatum streamsDatum = new StreamsDatum(video);
+
+      assertNotNull(streamsDatum.getDocument());
+
+      List<StreamsDatum> retList = youtubeTypeConverter.process(streamsDatum);
+      YoutubeActivityUtil.updateActivity(video, activity, "testChannelId");
+
+      assertEquals(retList.size(), 1);
+      assert (retList.get(0).getDocument() instanceof Activity);
+      assertEquals(activity, retList.get(0).getDocument());
+    } catch (Exception ex) {
+      LOGGER.error("Exception while trying to convert video to activity: {}", ex);
     }
+  }
 
-    @Test
-    public void testStringVideoConversion() {
-        try {
-            LOGGER.info("raw: {}", testVideo);
-            Activity activity = new Activity();
+  @Test
+  public void testStringVideoConversion() {
+    try {
+      LOGGER.info("raw: {}", testVideo);
+      Activity activity = new Activity();
 
-            Video video = objectMapper.readValue(testVideo, Video.class);
-            StreamsDatum streamsDatum = new StreamsDatum(testVideo);
+      Video video = objectMapper.readValue(testVideo, Video.class);
+      StreamsDatum streamsDatum = new StreamsDatum(testVideo);
 
-            assertNotNull(streamsDatum.getDocument());
+      assertNotNull(streamsDatum.getDocument());
 
-            List<StreamsDatum> retList = youtubeTypeConverter.process(streamsDatum);
-            YoutubeActivityUtil.updateActivity(video, activity, "testChannelId");
+      List<StreamsDatum> retList = youtubeTypeConverter.process(streamsDatum);
+      YoutubeActivityUtil.updateActivity(video, activity, "testChannelId");
 
-            assertEquals(retList.size(), 1);
-            assert (retList.get(0).getDocument() instanceof Activity);
-            assertEquals(activity, retList.get(0).getDocument());
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to convert video to activity: {}", e);
-        }
+      assertEquals(retList.size(), 1);
+      assert (retList.get(0).getDocument() instanceof Activity);
+      assertEquals(activity, retList.get(0).getDocument());
+    } catch (Exception ex) {
+      LOGGER.error("Exception while trying to convert video to activity: {}", ex);
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeChannelDataCollectorTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeChannelDataCollectorTest.java b/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeChannelDataCollectorTest.java
index 04c9456..4751f00 100644
--- a/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeChannelDataCollectorTest.java
+++ b/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeChannelDataCollectorTest.java
@@ -19,15 +19,16 @@
 
 package com.youtube.provider;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.google.gplus.configuration.UserInfo;
+import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+import org.apache.streams.util.api.requests.backoff.impl.LinearTimeBackOffStrategy;
+
 import com.google.api.services.youtube.YouTube;
 import com.google.api.services.youtube.model.Channel;
 import com.google.api.services.youtube.model.ChannelListResponse;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Queues;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.google.gplus.configuration.UserInfo;
-import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
-import org.apache.streams.util.api.requests.backoff.impl.LinearTimeBackOffStrategy;
 import org.apache.youtube.pojo.YoutubeConfiguration;
 import org.junit.Test;
 
@@ -41,61 +42,61 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 /**
- * Created by rebanks on 2/17/15.
+ * YoutubeChannelDataCollectorTest tests YoutubeChannelDataCollector.
  */
 public class YoutubeChannelDataCollectorTest {
 
-    private static final String ID = "12345";
+  private static final String ID = "12345";
 
-    @Test
-    public void testDataCollector() throws Exception {
-        YouTube youTube = createMockYoutube();
-        BlockingQueue<StreamsDatum> queue = Queues.newLinkedBlockingQueue();
-        BackOffStrategy strategy = new LinearTimeBackOffStrategy(1);
-        UserInfo userInfo = new UserInfo();
-        userInfo.setUserId(ID);
-        YoutubeConfiguration config = new YoutubeConfiguration();
-        config.setApiKey(ID);
-        YoutubeChannelDataCollector collector = new YoutubeChannelDataCollector(youTube, queue, strategy, userInfo, config);
-        collector.run();
-        assertEquals(1, queue.size());
-        StreamsDatum datum = queue.take();
-        assertNotNull(datum);
-        String document = (String) datum.getDocument();
-        assertNotNull(document);
-    }
+  @Test
+  public void testDataCollector() throws Exception {
+    YouTube youTube = createMockYoutube();
+    BlockingQueue<StreamsDatum> queue = Queues.newLinkedBlockingQueue();
+    BackOffStrategy strategy = new LinearTimeBackOffStrategy(1);
+    UserInfo userInfo = new UserInfo();
+    userInfo.setUserId(ID);
+    YoutubeConfiguration config = new YoutubeConfiguration();
+    config.setApiKey(ID);
+    YoutubeChannelDataCollector collector = new YoutubeChannelDataCollector(youTube, queue, strategy, userInfo, config);
+    collector.run();
+    assertEquals(1, queue.size());
+    StreamsDatum datum = queue.take();
+    assertNotNull(datum);
+    String document = (String) datum.getDocument();
+    assertNotNull(document);
+  }
 
-    private YouTube createMockYoutube() throws Exception {
-        YouTube mockYouTube = mock(YouTube.class);
-        YouTube.Channels channels = createMockChannels();
-        when(mockYouTube.channels()).thenReturn(channels);
-        return mockYouTube;
-    }
+  private YouTube createMockYoutube() throws Exception {
+    YouTube mockYouTube = mock(YouTube.class);
+    YouTube.Channels channels = createMockChannels();
+    when(mockYouTube.channels()).thenReturn(channels);
+    return mockYouTube;
+  }
 
-    private YouTube.Channels createMockChannels() throws Exception {
-        YouTube.Channels mockChannels = mock(YouTube.Channels.class);
-        YouTube.Channels.List channelLists = createMockChannelsList();
-        when(mockChannels.list(anyString())).thenReturn(channelLists);
-        return mockChannels;
-    }
+  private YouTube.Channels createMockChannels() throws Exception {
+    YouTube.Channels mockChannels = mock(YouTube.Channels.class);
+    YouTube.Channels.List channelLists = createMockChannelsList();
+    when(mockChannels.list(anyString())).thenReturn(channelLists);
+    return mockChannels;
+  }
 
-    private YouTube.Channels.List createMockChannelsList() throws Exception {
-        YouTube.Channels.List mockList = mock(YouTube.Channels.List.class);
-        when(mockList.setId(anyString())).thenReturn(mockList);
-        when(mockList.setKey(anyString())).thenReturn(mockList);
-        ChannelListResponse response = createMockResponse();
-        when(mockList.execute()).thenReturn(response);
-        return mockList;
-    }
+  private YouTube.Channels.List createMockChannelsList() throws Exception {
+    YouTube.Channels.List mockList = mock(YouTube.Channels.List.class);
+    when(mockList.setId(anyString())).thenReturn(mockList);
+    when(mockList.setKey(anyString())).thenReturn(mockList);
+    ChannelListResponse response = createMockResponse();
+    when(mockList.execute()).thenReturn(response);
+    return mockList;
+  }
 
-    private ChannelListResponse createMockResponse() {
-        ChannelListResponse response = new ChannelListResponse();
-        List<Channel> channelList = Lists.newLinkedList();
-        response.setItems(channelList);
-        Channel channel = new Channel();
-        channel.setId(ID);
-        channelList.add(channel);
-        return response;
-    }
+  private ChannelListResponse createMockResponse() {
+    ChannelListResponse response = new ChannelListResponse();
+    List<Channel> channelList = Lists.newLinkedList();
+    response.setItems(channelList);
+    Channel channel = new Channel();
+    channel.setId(ID);
+    channelList.add(channel);
+    return response;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeProviderTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeProviderTest.java b/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeProviderTest.java
index 02f1d52..5a2af8a 100644
--- a/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeProviderTest.java
+++ b/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeProviderTest.java
@@ -19,13 +19,14 @@
 
 package com.youtube.provider;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.google.gplus.configuration.UserInfo;
+import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+
 import com.google.api.client.util.Maps;
 import com.google.api.client.util.Sets;
 import com.google.api.services.youtube.YouTube;
 import com.google.common.collect.Lists;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.google.gplus.configuration.UserInfo;
-import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
 import org.apache.youtube.pojo.YoutubeConfiguration;
 import org.joda.time.DateTime;
 import org.junit.Test;
@@ -41,125 +42,128 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 import static org.mockito.Mockito.mock;
 
+/**
+ * Test for YoutubeProvider.
+ */
 public class YoutubeProviderTest {
 
-    /**
-     * Test that every collector will be run and that data queued from the collectors will be processed.
-     */
-    @Test
-    public void testDataCollectorRunsPerUser() {
-        Random r = new Random(System.currentTimeMillis());
-        int numUsers = r.nextInt(1000);
-        List<UserInfo> userList = Lists.newLinkedList();
-
-        for(int i=0; i < numUsers; ++i) {
-            userList.add(new UserInfo());
-        }
-
-        YoutubeConfiguration config = new YoutubeConfiguration();
-        config.setYoutubeUsers(userList);
-        config.setApiKey("API_KEY");
-        YoutubeProvider provider = buildProvider(config);
-
-        try {
-            provider.prepare(null);
-            provider.startStream();
-            int datumCount = 0;
-            while(provider.isRunning()) {
-                datumCount += provider.readCurrent().size();
-            }
-            assertEquals(numUsers, datumCount);
-        } finally {
-            provider.cleanUp();
-        }
+  /**
+   * Test that every collector will be run and that data queued from the collectors will be processed.
+   */
+  @Test
+  public void testDataCollectorRunsPerUser() {
+    Random random = new Random(System.currentTimeMillis());
+    int numUsers = random.nextInt(1000);
+    List<UserInfo> userList = Lists.newLinkedList();
+
+    for ( int i = 0; i < numUsers; ++i ) {
+      userList.add(new UserInfo());
     }
 
-    @Test
-    public void testConfigSetterGetter() {
-        YoutubeConfiguration config = new YoutubeConfiguration();
-        config.setApiKey("API_KEY");
-        config.setVersion("fake_version_1");
-        YoutubeConfiguration newConfig = new YoutubeConfiguration();
-        newConfig.setApiKey("API_KEY");
-        config.setVersion("fake_version_2");
+    YoutubeConfiguration config = new YoutubeConfiguration();
+    config.setYoutubeUsers(userList);
+    config.setApiKey("API_KEY");
+    YoutubeProvider provider = buildProvider(config);
+
+    try {
+      provider.prepare(null);
+      provider.startStream();
+      int datumCount = 0;
+      while (provider.isRunning()) {
+        datumCount += provider.readCurrent().size();
+      }
+      assertEquals(numUsers, datumCount);
+    } finally {
+      provider.cleanUp();
+    }
+  }
 
-        YoutubeProvider provider = buildProvider(config);
+  @Test
+  public void testConfigSetterGetter() {
+    YoutubeConfiguration config = new YoutubeConfiguration();
+    config.setApiKey("API_KEY");
+    config.setVersion("fake_version_1");
+    YoutubeConfiguration newConfig = new YoutubeConfiguration();
+    newConfig.setApiKey("API_KEY");
+    config.setVersion("fake_version_2");
 
-        assertEquals(provider.getConfig(), config);
+    YoutubeProvider provider = buildProvider(config);
 
-        provider.setConfig(newConfig);
-        assertEquals(provider.getConfig(), newConfig);
-    }
+    assertEquals(provider.getConfig(), config);
 
-    @Test
-    public void testUserInfoWithDefaultDates() {
-        YoutubeConfiguration config = new YoutubeConfiguration();
-        config.setApiKey("API_KEY");
-        YoutubeProvider provider = buildProvider(config);
+    provider.setConfig(newConfig);
+    assertEquals(provider.getConfig(), newConfig);
+  }
 
-        DateTime afterDate = new DateTime(System.currentTimeMillis());
-        DateTime beforeDate = afterDate.minus(10000);
+  @Test
+  public void testUserInfoWithDefaultDates() {
+    YoutubeConfiguration config = new YoutubeConfiguration();
+    config.setApiKey("API_KEY");
+    YoutubeProvider provider = buildProvider(config);
 
-        provider.setDefaultAfterDate(afterDate);
-        provider.setDefaultBeforeDate(beforeDate);
+    DateTime afterDate = new DateTime(System.currentTimeMillis());
+    DateTime beforeDate = afterDate.minus(10000);
 
-        Set<String> users = Sets.newHashSet();
-        users.add("test_user_1");
-        users.add("test_user_2");
-        users.add("test_user_3");
+    provider.setDefaultAfterDate(afterDate);
+    provider.setDefaultBeforeDate(beforeDate);
 
-        provider.setUserInfoWithDefaultDates(users);
+    Set<String> users = Sets.newHashSet();
+    users.add("test_user_1");
+    users.add("test_user_2");
+    users.add("test_user_3");
 
-        List<UserInfo> youtubeUsers = provider.getConfig().getYoutubeUsers();
+    provider.setUserInfoWithDefaultDates(users);
 
-        for(UserInfo user : youtubeUsers) {
-            assert(user.getAfterDate().equals(afterDate));
-            assert(user.getBeforeDate().equals(beforeDate));
-        }
+    List<UserInfo> youtubeUsers = provider.getConfig().getYoutubeUsers();
+
+    for (UserInfo user : youtubeUsers) {
+      assert (user.getAfterDate().equals(afterDate));
+      assert (user.getBeforeDate().equals(beforeDate));
     }
+  }
 
-    @Test
-    public void testUserInfoWithAfterDate() {
-        YoutubeConfiguration config = new YoutubeConfiguration();
-        config.setApiKey("API_KEY");
-        YoutubeProvider provider = buildProvider(config);
+  @Test
+  public void testUserInfoWithAfterDate() {
+    YoutubeConfiguration config = new YoutubeConfiguration();
+    config.setApiKey("API_KEY");
+    YoutubeProvider provider = buildProvider(config);
 
-        Map<String, DateTime> users = Maps.newHashMap();
-        users.put("user1", new DateTime(System.currentTimeMillis()));
-        users.put("user3", new DateTime(System.currentTimeMillis()));
-        users.put("user4", new DateTime(System.currentTimeMillis()));
+    Map<String, DateTime> users = Maps.newHashMap();
+    users.put("user1", new DateTime(System.currentTimeMillis()));
+    users.put("user3", new DateTime(System.currentTimeMillis()));
+    users.put("user4", new DateTime(System.currentTimeMillis()));
 
-        provider.setUserInfoWithAfterDate(users);
+    provider.setUserInfoWithAfterDate(users);
 
-        List<UserInfo> youtubeUsers = provider.getConfig().getYoutubeUsers();
+    List<UserInfo> youtubeUsers = provider.getConfig().getYoutubeUsers();
 
-        for(UserInfo user : youtubeUsers) {
-            assert(user.getAfterDate().equals(users.get(user.getUserId())));
-        }
+    for (UserInfo user : youtubeUsers) {
+      assert (user.getAfterDate().equals(users.get(user.getUserId())));
     }
-
-    private YoutubeProvider buildProvider(YoutubeConfiguration config) {
-        return new YoutubeProvider(config) {
-
-            @Override
-            protected YouTube createYouTubeClient() throws IOException {
-                return mock(YouTube.class);
-            }
-
-            @Override
-            protected Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, YouTube youtube, UserInfo userInfo) {
-                final BlockingQueue<StreamsDatum> q = queue;
-                return new Runnable() {
-                    @Override
-                    public void run() {
-                        try {
-                            q.put(new StreamsDatum(null));
-                        } catch (InterruptedException ie) {
-                            fail("Test was interrupted");
-                        }
-                    }
-                };
+  }
+
+  private YoutubeProvider buildProvider(YoutubeConfiguration config) {
+    return new YoutubeProvider(config) {
+
+      @Override
+      protected YouTube createYouTubeClient() throws IOException {
+        return mock(YouTube.class);
+      }
+
+      @Override
+      protected Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, YouTube youtube, UserInfo userInfo) {
+        final BlockingQueue<StreamsDatum> q = queue;
+        return new Runnable() {
+          @Override
+          public void run() {
+            try {
+              q.put(new StreamsDatum(null));
+            } catch (InterruptedException ie) {
+              fail("Test was interrupted");
             }
+          }
         };
-    }
+      }
+    };
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeUserActivityCollectorTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeUserActivityCollectorTest.java b/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeUserActivityCollectorTest.java
index 7d46274..1870c14 100644
--- a/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeUserActivityCollectorTest.java
+++ b/streams-contrib/streams-provider-youtube/src/test/java/com/youtube/provider/YoutubeUserActivityCollectorTest.java
@@ -19,13 +19,20 @@
 
 package com.youtube.provider;
 
-import com.google.api.client.util.Lists;
-import com.google.api.services.youtube.YouTube;
-import com.google.api.services.youtube.model.*;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.google.gplus.configuration.UserInfo;
 import org.apache.streams.local.queues.ThroughputQueue;
 import org.apache.streams.util.api.requests.backoff.impl.ExponentialBackOffStrategy;
+
+import com.google.api.client.util.Lists;
+import com.google.api.services.youtube.YouTube;
+import com.google.api.services.youtube.model.Activity;
+import com.google.api.services.youtube.model.ActivityContentDetails;
+import com.google.api.services.youtube.model.ActivityContentDetailsUpload;
+import com.google.api.services.youtube.model.ActivityListResponse;
+import com.google.api.services.youtube.model.Video;
+import com.google.api.services.youtube.model.VideoListResponse;
+import com.google.api.services.youtube.model.VideoSnippet;
 import org.apache.youtube.pojo.YoutubeConfiguration;
 import org.joda.time.DateTime;
 import org.junit.Before;
@@ -37,308 +44,316 @@ import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.BlockingQueue;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
 import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.anyLong;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
+/**
+ * Test for YoutubeUserActivityCollector.
+ */
 public class YoutubeUserActivityCollectorTest {
-    private final String USER_ID = "fake_user_id";
-    private static final String IN_RANGE_IDENTIFIER = "data in range";
-    private YoutubeConfiguration config;
-
-    @Before
-    public void setup() {
-        this.config = new YoutubeConfiguration();
-        this.config.setApiKey("API_KEY");
-    }
+  private static final String USER_ID = "fake_user_id";
+  private static final String IN_RANGE_IDENTIFIER = "data in range";
+  private YoutubeConfiguration config;
 
-    @Test
-    public void testGetVideos() throws IOException {
-        DateTime now = new DateTime(System.currentTimeMillis());
-        YouTube youtube = buildYouTube(0, 1, 0, now, now.minus(10000));
+  @Before
+  public void setup() {
+    this.config = new YoutubeConfiguration();
+    this.config.setApiKey("API_KEY");
+  }
 
-        BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
-        YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
+  @Test
+  public void testGetVideos() throws IOException {
+    DateTime now = new DateTime(System.currentTimeMillis());
+    YouTube youtube = buildYouTube(0, 1, 0, now, now.minus(10000));
 
-        List<Video> video = collector.getVideoList("fake_video_id");
+    BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
+    YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
 
-        assertNotNull(video.get(0));
-    }
+    List<Video> video = collector.getVideoList("fake_video_id");
 
-    @Test
-    public void testGetVideosNull() throws IOException {
-        DateTime now = new DateTime(System.currentTimeMillis());
-        YouTube youtube = buildYouTube(0, 0, 0, now.plus(10000), now.minus(10000));
+    assertNotNull(video.get(0));
+  }
 
-        BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
-        YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
+  @Test
+  public void testGetVideosNull() throws IOException {
+    DateTime now = new DateTime(System.currentTimeMillis());
+    YouTube youtube = buildYouTube(0, 0, 0, now.plus(10000), now.minus(10000));
 
-        List<Video> video = collector.getVideoList("fake_video_id");
+    BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
+    YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
 
-        assertEquals(video.size(), 0);
-    }
+    List<Video> video = collector.getVideoList("fake_video_id");
 
-    @Test
-    public void testProcessActivityFeed() throws IOException, InterruptedException {
-        DateTime now = new DateTime(System.currentTimeMillis());
-        YouTube youtube = buildYouTube(0, 0, 5, now.plus(3000000), now.minus(1000000));
+    assertEquals(video.size(), 0);
+  }
 
-        BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
-        YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
+  @Test
+  public void testProcessActivityFeed() throws IOException, InterruptedException {
+    DateTime now = new DateTime(System.currentTimeMillis());
+    YouTube youtube = buildYouTube(0, 0, 5, now.plus(3000000), now.minus(1000000));
 
-        ActivityListResponse feed = buildActivityListResponse(1);
+    BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
+    YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
 
-        collector.processActivityFeed(feed, new DateTime(System.currentTimeMillis()), null);
+    ActivityListResponse feed = buildActivityListResponse(1);
 
-        assertEquals(collector.getDatumQueue().size(), 5);
-    }
+    collector.processActivityFeed(feed, new DateTime(System.currentTimeMillis()), null);
 
-    @Test
-    public void testProcessActivityFeedBefore() throws IOException, InterruptedException {
-        DateTime now = new DateTime(System.currentTimeMillis());
-        YouTube youtube = buildYouTube(5, 0, 0, now, now);
+    assertEquals(collector.getDatumQueue().size(), 5);
+  }
 
-        BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
-        YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
+  @Test
+  public void testProcessActivityFeedBefore() throws IOException, InterruptedException {
+    DateTime now = new DateTime(System.currentTimeMillis());
+    YouTube youtube = buildYouTube(5, 0, 0, now, now);
 
-        ActivityListResponse feed = buildActivityListResponse(1);
+    BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
+    YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
 
-        collector.processActivityFeed(feed, new DateTime(System.currentTimeMillis()), null);
+    ActivityListResponse feed = buildActivityListResponse(1);
 
-        assertEquals(collector.getDatumQueue().size(), 0);
-    }
+    collector.processActivityFeed(feed, new DateTime(System.currentTimeMillis()), null);
 
-    @Test
-    public void testProcessActivityFeedAfter() throws IOException, InterruptedException {
-        DateTime now = new DateTime(System.currentTimeMillis());
-        YouTube youtube = buildYouTube(0, 5, 0, now, now);
+    assertEquals(collector.getDatumQueue().size(), 0);
+  }
 
-        BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
-        YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
+  @Test
+  public void testProcessActivityFeedAfter() throws IOException, InterruptedException {
+    DateTime now = new DateTime(System.currentTimeMillis());
+    YouTube youtube = buildYouTube(0, 5, 0, now, now);
 
-        ActivityListResponse feed = buildActivityListResponse(1);
+    BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
+    YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
 
-        collector.processActivityFeed(feed, new DateTime(now.getMillis()-100000), null);
+    ActivityListResponse feed = buildActivityListResponse(1);
 
-        assertEquals(collector.getDatumQueue().size(), 5);
-    }
+    collector.processActivityFeed(feed, new DateTime(now.getMillis() - 100000), null);
 
-    @Test
-    public void testProcessActivityFeedMismatchCount() throws IOException, InterruptedException {
-        DateTime now = new DateTime(System.currentTimeMillis());
-        YouTube youtube = buildYouTube(5, 5, 5, now, now.minus(100000));
+    assertEquals(collector.getDatumQueue().size(), 5);
+  }
 
-        BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
-        YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
+  @Test
+  public void testProcessActivityFeedMismatchCount() throws IOException, InterruptedException {
+    DateTime now = new DateTime(System.currentTimeMillis());
+    YouTube youtube = buildYouTube(5, 5, 5, now, now.minus(100000));
 
-        ActivityListResponse feed = buildActivityListResponse(1);
+    BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
+    YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
 
-        collector.processActivityFeed(feed, new DateTime(now), null);
+    ActivityListResponse feed = buildActivityListResponse(1);
 
-        assertEquals(collector.getDatumQueue().size(), 5);
-    }
+    collector.processActivityFeed(feed, new DateTime(now), null);
 
-    @Test
-    public void testProcessActivityFeedMismatchCountInRange() throws IOException, InterruptedException {
-        DateTime now = new DateTime(System.currentTimeMillis());
-        YouTube youtube = buildYouTube(5, 5, 5, now, now.minus(100000));
+    assertEquals(collector.getDatumQueue().size(), 5);
+  }
 
-        BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
-        YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
+  @Test
+  public void testProcessActivityFeedMismatchCountInRange() throws IOException, InterruptedException {
+    DateTime now = new DateTime(System.currentTimeMillis());
+    YouTube youtube = buildYouTube(5, 5, 5, now, now.minus(100000));
 
-        ActivityListResponse feed = buildActivityListResponse(1);
+    BlockingQueue<StreamsDatum> datumQueue = new ThroughputQueue<>();
+    YoutubeUserActivityCollector collector = new YoutubeUserActivityCollector(youtube, datumQueue, new ExponentialBackOffStrategy(2), new UserInfo().withUserId(USER_ID), this.config);
 
-        collector.processActivityFeed(feed, new DateTime(now), new DateTime(now).minus(100000));
+    ActivityListResponse feed = buildActivityListResponse(1);
 
-        assertEquals(collector.getDatumQueue().size(), 5);
-    }
+    collector.processActivityFeed(feed, new DateTime(now), new DateTime(now).minus(100000));
 
-    private ActivityListResponse buildActivityListResponse(int num) {
-        ActivityListResponse activityListResponse = new ActivityListResponse();
-        List<Activity> items = Lists.newArrayList();
+    assertEquals(collector.getDatumQueue().size(), 5);
+  }
 
-        for(int x = 0; x < num; x ++ ) {
-            Activity activity = new Activity();
+  private ActivityListResponse buildActivityListResponse(int num) {
+    ActivityListResponse activityListResponse = new ActivityListResponse();
+    List<Activity> items = Lists.newArrayList();
 
-            ActivityContentDetails contentDetails = new ActivityContentDetails();
-            ActivityContentDetailsUpload upload = new ActivityContentDetailsUpload();
-            upload.setVideoId("video_id_" + x);
-            contentDetails.setUpload(upload);
+    for ( int x = 0; x < num; x++ ) {
+      Activity activity = new Activity();
 
-            activity.setId("id_" + x);
-            activity.setContentDetails(contentDetails);
+      ActivityContentDetails contentDetails = new ActivityContentDetails();
+      ActivityContentDetailsUpload upload = new ActivityContentDetailsUpload();
+      upload.setVideoId("video_id_" + x);
+      contentDetails.setUpload(upload);
 
-            items.add(activity);
-        }
+      activity.setId("id_" + x);
+      activity.setContentDetails(contentDetails);
 
-        activityListResponse.setItems(items);
-
-        return activityListResponse;
+      items.add(activity);
     }
 
-    private YouTube buildYouTube(int numBeforeRange, int numAfterRange, int numInRange, DateTime afterDate, DateTime beforeDate) {
+    activityListResponse.setItems(items);
 
-        YouTube youtube = createYoutubeMock(numBeforeRange, numAfterRange, numInRange, afterDate, beforeDate);
+    return activityListResponse;
+  }
 
-        return youtube;
-    }
+  private YouTube buildYouTube(int numBeforeRange, int numAfterRange, int numInRange, DateTime afterDate, DateTime beforeDate) {
 
-    private YouTube createYoutubeMock(int numBefore, int numAfter, int numInRange,  DateTime after, DateTime before) {
-        YouTube youtube = mock(YouTube.class);
+    YouTube youtube = createYoutubeMock(numBeforeRange, numAfterRange, numInRange, afterDate, beforeDate);
 
-        final YouTube.Videos videos = createMockVideos(numBefore, numAfter, numInRange, after, before);
-        doAnswer(new Answer() {
-            @Override
-            public YouTube.Videos answer(InvocationOnMock invocationOnMock) throws Throwable {
-                return videos;
-            }
-        }).when(youtube).videos();
+    return youtube;
+  }
 
-        return youtube;
-    }
+  private YouTube createYoutubeMock(int numBefore, int numAfter, int numInRange,  DateTime after, DateTime before) {
+    YouTube youtube = mock(YouTube.class);
+
+    final YouTube.Videos videos = createMockVideos(numBefore, numAfter, numInRange, after, before);
+    doAnswer(new Answer() {
+      @Override
+      public YouTube.Videos answer(InvocationOnMock invocationOnMock) throws Throwable {
+        return videos;
+      }
+    }).when(youtube).videos();
 
-    private YouTube.Videos createMockVideos(int numBefore, int numAfter, int numInRange,  DateTime after, DateTime before) {
-        YouTube.Videos videos = mock(YouTube.Videos.class);
+    return youtube;
+  }
 
-        try {
-            YouTube.Videos.List list = createMockVideosList(numBefore, numAfter, numInRange, after, before);
-            when(videos.list(anyString())).thenReturn(list);
-        } catch (IOException e) {
-            fail("Exception thrown while creating mock");
-        }
+  private YouTube.Videos createMockVideos(int numBefore, int numAfter, int numInRange,  DateTime after, DateTime before) {
+    YouTube.Videos videos = mock(YouTube.Videos.class);
 
-        return videos;
+    try {
+      YouTube.Videos.List list = createMockVideosList(numBefore, numAfter, numInRange, after, before);
+      when(videos.list(anyString())).thenReturn(list);
+    } catch (IOException ex) {
+      fail("Exception thrown while creating mock");
     }
 
-    private YouTube.Videos.List createMockVideosList(int numBefore, int numAfter, int numInRange,  DateTime after, DateTime before) {
-        YouTube.Videos.List list = mock(YouTube.Videos.List.class);
+    return videos;
+  }
 
-        when(list.setMaxResults(anyLong())).thenReturn(list);
-        when(list.setPageToken(anyString())).thenReturn(list);
-        when(list.setId(anyString())).thenReturn(list);
-        when(list.setKey(anyString())).thenReturn(list);
+  private YouTube.Videos.List createMockVideosList(int numBefore, int numAfter, int numInRange,  DateTime after, DateTime before) {
+    YouTube.Videos.List list = mock(YouTube.Videos.List.class);
 
-        VideoListResponseAnswer answer = new VideoListResponseAnswer(numBefore, numAfter, numInRange, after, before);
-        try {
-            doAnswer(answer).when(list).execute();
-        } catch (IOException ioe) {
-            fail("Should not have thrown exception while creating mock. : "+ioe.getMessage());
-        }
+    when(list.setMaxResults(anyLong())).thenReturn(list);
+    when(list.setPageToken(anyString())).thenReturn(list);
+    when(list.setId(anyString())).thenReturn(list);
+    when(list.setKey(anyString())).thenReturn(list);
 
-        return list;
+    VideoListResponseAnswer answer = new VideoListResponseAnswer(numBefore, numAfter, numInRange, after, before);
+    try {
+      doAnswer(answer).when(list).execute();
+    } catch (IOException ioe) {
+      fail("Should not have thrown exception while creating mock. : " + ioe.getMessage());
     }
 
-    private static VideoListResponse createMockVideoListResponse(int numBefore, int numAfter, int numInRange,  DateTime after, DateTime before, boolean page) {
-        VideoListResponse feed = new VideoListResponse();
-        List<Video> list = com.google.common.collect.Lists.newLinkedList();
+    return list;
+  }
 
-        for(int i=0; i < numAfter; ++i) {
-            com.google.api.client.util.DateTime published = new com.google.api.client.util.DateTime(after.getMillis() + 1000000);
-            Video video = new Video();
-            video.setSnippet(new VideoSnippet());
-            video.getSnippet().setPublishedAt(published);
-            list.add(video);
-        }
-        for(int i=0; i < numInRange; ++i) {
-            DateTime published = null;
-            if((before == null && after == null) || before == null) {
-                published = DateTime.now(); // no date range or end time date range so just make the time now.
-            } else if(after == null) {
-                published = before.minusMillis(100000); //no beginning to range
-            } else { // has to be in range
-                long range = before.getMillis() - after.getMillis();
-                published = after.plus(range / 2); //in the middle
-            }
-            com.google.api.client.util.DateTime gPublished = new com.google.api.client.util.DateTime(published.getMillis());
-            Video video = new Video();
-            video.setSnippet(new VideoSnippet());
-            video.getSnippet().setPublishedAt(gPublished);
-            video.getSnippet().setTitle(IN_RANGE_IDENTIFIER);
-            list.add(video);
-        }
-        for(int i=0; i < numBefore; ++i) {
-            com.google.api.client.util.DateTime published = new com.google.api.client.util.DateTime((after.minusMillis(100000)).getMillis());
-            Video video = new Video();
-            video.setSnippet(new VideoSnippet());
-            video.getSnippet().setPublishedAt(published);
-            list.add(video);
-        }
-        if(page) {
-            feed.setNextPageToken("A");
-        } else {
-            feed.setNextPageToken(null);
-        }
+  private static VideoListResponse createMockVideoListResponse(int numBefore, int numAfter, int numInRange,  DateTime after, DateTime before, boolean page) {
+    VideoListResponse feed = new VideoListResponse();
+    List<Video> list = com.google.common.collect.Lists.newLinkedList();
 
-        feed.setItems(list);
+    for (int i = 0; i < numAfter; ++i) {
+      com.google.api.client.util.DateTime published = new com.google.api.client.util.DateTime(after.getMillis() + 1000000);
+      Video video = new Video();
+      video.setSnippet(new VideoSnippet());
+      video.getSnippet().setPublishedAt(published);
+      list.add(video);
+    }
+    for (int i = 0; i < numInRange; ++i) {
+      DateTime published = null;
+      if ((before == null && after == null) || before == null) {
+        published = DateTime.now(); // no date range or end time date range so just make the time now.
+      } else if (after == null) {
+        published = before.minusMillis(100000); //no beginning to range
+      } else { // has to be in range
+        long range = before.getMillis() - after.getMillis();
+        published = after.plus(range / 2); //in the middle
+      }
+      com.google.api.client.util.DateTime ytPublished = new com.google.api.client.util.DateTime(published.getMillis());
+      Video video = new Video();
+      video.setSnippet(new VideoSnippet());
+      video.getSnippet().setPublishedAt(ytPublished);
+      video.getSnippet().setTitle(IN_RANGE_IDENTIFIER);
+      list.add(video);
+    }
+    for (int i = 0; i < numBefore; ++i) {
+      com.google.api.client.util.DateTime published = new com.google.api.client.util.DateTime((after.minusMillis(100000)).getMillis());
+      Video video = new Video();
+      video.setSnippet(new VideoSnippet());
+      video.getSnippet().setPublishedAt(published);
+      list.add(video);
+    }
+    if (page) {
+      feed.setNextPageToken("A");
+    } else {
+      feed.setNextPageToken(null);
+    }
 
-        return feed;
+    feed.setItems(list);
+
+    return feed;
+  }
+
+  private static class VideoListResponseAnswer implements Answer<VideoListResponse> {
+    private int afterCount = 0;
+    private int beforeCount = 0;
+    private int inCount = 0;
+    private int maxBatch = 100;
+
+    private int numAfter;
+    private int numInRange;
+    private int numBefore;
+    private DateTime after;
+    private DateTime before;
+
+    private VideoListResponseAnswer(int numBefore, int numAfter, int numInRange, DateTime after, DateTime before) {
+      this.numBefore = numBefore;
+      this.numAfter = numAfter;
+      this.numInRange = numInRange;
+      this.after = after;
+      this.before = before;
     }
 
-    private static class VideoListResponseAnswer implements Answer<VideoListResponse> {
-        private int afterCount = 0;
-        private int beforeCount = 0;
-        private int inCount = 0;
-        private int maxBatch = 100;
-
-        private int numAfter;
-        private int numInRange;
-        private int numBefore;
-        private DateTime after;
-        private DateTime before;
-
-        private VideoListResponseAnswer(int numBefore, int numAfter, int numInRange, DateTime after, DateTime before) {
-            this.numBefore = numBefore;
-            this.numAfter = numAfter;
-            this.numInRange = numInRange;
-            this.after = after;
-            this.before = before;
+    @Override
+    public VideoListResponse answer(InvocationOnMock invocationOnMock) throws Throwable {
+      int totalCount = 0;
+      int batchAfter = 0;
+      int batchBefore = 0;
+      int batchIn = 0;
+      inCount = 0;
+      afterCount = 0;
+      beforeCount = 0;
+
+      if (afterCount != numAfter) {
+        if (numAfter - afterCount >= maxBatch) {
+          afterCount += maxBatch;
+          batchAfter += maxBatch;
+          totalCount += batchAfter;
+        } else {
+          batchAfter += numAfter - afterCount;
+          totalCount += numAfter - afterCount;
+          afterCount = numAfter;
         }
-
-        @Override
-        public VideoListResponse answer(InvocationOnMock invocationOnMock) throws Throwable {
-            int totalCount = 0;
-            int batchAfter = 0;
-            int batchBefore = 0;
-            int batchIn = 0;
-            inCount = 0;
-            afterCount = 0;
-            beforeCount = 0;
-
-            if(afterCount != numAfter) {
-                if(numAfter - afterCount >= maxBatch) {
-                    afterCount += maxBatch;
-                    batchAfter += maxBatch;
-                    totalCount += batchAfter;
-                } else {
-                    batchAfter += numAfter - afterCount;
-                    totalCount += numAfter - afterCount;
-                    afterCount = numAfter;
-                }
-            }
-            if(totalCount < maxBatch && inCount != numInRange) {
-                if(numInRange - inCount >= maxBatch - totalCount) {
-                    inCount += maxBatch - totalCount;
-                    batchIn += maxBatch - totalCount;
-                    totalCount += batchIn;
-                } else {
-                    batchIn += numInRange - inCount;
-                    totalCount += numInRange - inCount;
-                    inCount = numInRange;
-                }
-            }
-            if(totalCount < maxBatch && beforeCount != numBefore) {
-                if(numBefore - batchBefore >= maxBatch - totalCount) {
-                    batchBefore += maxBatch - totalCount;
-                    totalCount = maxBatch;
-                    beforeCount +=batchBefore;
-                } else {
-                    batchBefore += numBefore - beforeCount;
-                    totalCount += numBefore - beforeCount;
-                    beforeCount = numBefore;
-                }
-            }
-
-            return createMockVideoListResponse(batchBefore, batchAfter, batchIn, after, before, numAfter != afterCount || inCount != numInRange || beforeCount != numBefore);
+      }
+      if (totalCount < maxBatch && inCount != numInRange) {
+        if (numInRange - inCount >= maxBatch - totalCount) {
+          inCount += maxBatch - totalCount;
+          batchIn += maxBatch - totalCount;
+          totalCount += batchIn;
+        } else {
+          batchIn += numInRange - inCount;
+          totalCount += numInRange - inCount;
+          inCount = numInRange;
+        }
+      }
+      if (totalCount < maxBatch && beforeCount != numBefore) {
+        if (numBefore - batchBefore >= maxBatch - totalCount) {
+          batchBefore += maxBatch - totalCount;
+          totalCount = maxBatch;
+          beforeCount += batchBefore;
+        } else {
+          batchBefore += numBefore - beforeCount;
+          totalCount += numBefore - beforeCount;
+          beforeCount = numBefore;
         }
+      }
+
+      return createMockVideoListResponse(batchBefore, batchAfter, batchIn, after, before, numAfter != afterCount || inCount != numInRange || beforeCount != numBefore);
     }
+  }
 }
\ No newline at end of file


[18/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterDocumentClassifierTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterDocumentClassifierTest.java b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterDocumentClassifierTest.java
index a1ca7c5..418491a 100644
--- a/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterDocumentClassifierTest.java
+++ b/streams-contrib/streams-provider-twitter/src/test/java/org/apache/streams/twitter/test/utils/TwitterDocumentClassifierTest.java
@@ -24,6 +24,7 @@ import org.apache.streams.twitter.pojo.Follow;
 import org.apache.streams.twitter.pojo.Retweet;
 import org.apache.streams.twitter.pojo.Tweet;
 import org.apache.streams.twitter.pojo.User;
+
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -34,55 +35,60 @@ import java.util.List;
  */
 public class TwitterDocumentClassifierTest {
 
-    private String tweet = "{\"created_at\":\"Wed Dec 11 22:27:34 +0000 2013\",\"id\":12345,\"id_str\":\"12345\",\"text\":\"text\",\"source\":\"source\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":91407775,\"id_str\":\"12345\",\"name\":\"name\",\"screen_name\":\"screen_name\",\"location\":\"\",\"url\":null,\"description\":null,\"protected\":false,\"followers_count\":136,\"friends_count\":0,\"listed_count\":1,\"created_at\":\"Fri Nov 20 19:29:02 +0000 2009\",\"favourites_count\":0,\"utc_offset\":null,\"time_zone\":null,\"geo_enabled\":false,\"verified\":false,\"statuses_count\":1793,\"lang\":\"en\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"C0DEED\",\"profile_background_image_url\":\"http:\\/\\/profile_background_image_url.png\",\"profile_background_image_url_https\":\"https:\\/\\/profile_b
 ackground_image_url_https.png\",\"profile_background_tile\":false,\"profile_image_url\":\"http:\\/\\/profile_image_url.jpg\",\"profile_image_url_https\":\"https:\\/\\/profile_image_url_https.jpg\",\"profile_link_color\":\"0084B4\",\"profile_sidebar_border_color\":\"C0DEED\",\"profile_sidebar_fill_color\":\"DDEEF6\",\"profile_text_color\":\"333333\",\"profile_use_background_image\":true,\"default_profile\":true,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweet_count\":0,\"favorite_count\":0,\"entities\":{\"hashtags\":[],\"symbols\":[],\"urls\":[{\"url\":\"http:\\/\\/url\",\"expanded_url\":\"http:\\/\\/expanded_url\",\"display_url\":\"display_url\",\"indices\":[118,140]}],\"user_mentions\":[]},\"favorited\":false,\"retweeted\":false,\"possibly_sensitive\":false,\"filter_level\":\"medium\",\"lang\":\"en\"}\n";
-    private String retweet = "{\"created_at\":\"Wed Dec 11 22:27:34 +0000 2013\",\"id\":23456,\"id_str\":\"23456\",\"text\":\"text\",\"source\":\"web\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":163149656,\"id_str\":\"34567\",\"name\":\"name\",\"screen_name\":\"screen_name\",\"location\":\"location\",\"url\":\"http:\\/\\/www.youtube.com\\/watch?v=url\",\"description\":\"description\\u00ed\",\"protected\":false,\"followers_count\":41,\"friends_count\":75,\"listed_count\":2,\"created_at\":\"Mon Jul 05 17:35:49 +0000 2010\",\"favourites_count\":4697,\"utc_offset\":-10800,\"time_zone\":\"Buenos Aires\",\"geo_enabled\":false,\"verified\":false,\"statuses_count\":5257,\"lang\":\"es\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"C4A64B\",\"profile_background_image_url\":\"http:\\/\\/a0.twimg.com\\
 /profile_background_images\\/12345\\/12345.jpeg\",\"profile_background_image_url_https\":\"https:\\/\\/si0.twimg.com\\/profile_background_images\\/12345\\/12345.jpeg\",\"profile_background_tile\":true,\"profile_image_url\":\"http:\\/\\/pbs.twimg.com\\/profile_images\\/12345\\/12345.jpeg\",\"profile_image_url_https\":\"https:\\/\\/pbs.twimg.com\\/profile_images\\/12345\\/12345.jpeg\",\"profile_banner_url\":\"https:\\/\\/pbs.twimg.com\\/profile_banners\\/12345\\/12345\",\"profile_link_color\":\"BF415A\",\"profile_sidebar_border_color\":\"000000\",\"profile_sidebar_fill_color\":\"B17CED\",\"profile_text_color\":\"3D1957\",\"profile_use_background_image\":true,\"default_profile\":false,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweeted_status\":{\"created_at\":\"Wed Dec 11 22:25:06 +0000 2013\",\"id\":34567,\"id_str\":\"34567\",\"text\":\"text\",\"sourc
 e\":\"source\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":34567,\"id_str\":\"34567\",\"name\":\"name\",\"screen_name\":\"screen_name\",\"location\":\"\",\"url\":\"http:\\/\\/www.web.com\",\"description\":\"description\",\"protected\":false,\"followers_count\":34307,\"friends_count\":325,\"listed_count\":361,\"created_at\":\"Fri Apr 13 19:00:11 +0000 2012\",\"favourites_count\":44956,\"utc_offset\":3600,\"time_zone\":\"Madrid\",\"geo_enabled\":false,\"verified\":false,\"statuses_count\":24011,\"lang\":\"es\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"000000\",\"profile_background_image_url\":\"http:\\/\\/profile_background_image_url.jpeg\",\"profile_background_image_url_https\":\"https:\\/\\/si0.twimg.com\\/profile_background_images\\/34567\\/34567.jpeg\",\"profile_background_tile\":false,
 \"profile_image_url\":\"http:\\/\\/pbs.twimg.com\\/profile_images\\/34567\\/34567.gif\",\"profile_image_url_https\":\"https:\\/\\/pbs.twimg.com\\/profile_images\\/34567\\/34567.gif\",\"profile_banner_url\":\"https:\\/\\/pbs.twimg.com\\/profile_banners\\/34567\\/34567\",\"profile_link_color\":\"FF00E1\",\"profile_sidebar_border_color\":\"FFFFFF\",\"profile_sidebar_fill_color\":\"F3F3F3\",\"profile_text_color\":\"333333\",\"profile_use_background_image\":true,\"default_profile\":false,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweet_count\":9,\"favorite_count\":6,\"entities\":{\"hashtags\":[],\"symbols\":[],\"urls\":[],\"user_mentions\":[]},\"favorited\":false,\"retweeted\":false,\"lang\":\"es\"},\"retweet_count\":0,\"favorite_count\":0,\"entities\":{\"hashtags\":[],\"symbols\":[],\"urls\":[],\"user_mentions\":[{\"screen_name\":\"screen_name\",\"name\
 ":\"name emocional\",\"id\":45678,\"id_str\":\"45678\",\"indices\":[3,14]}]},\"favorited\":false,\"retweeted\":false,\"filter_level\":\"medium\",\"lang\":\"es\"}\n";
-    private String delete = "{\"delete\":{\"status\":{\"id\":56789,\"user_id\":67890,\"id_str\":\"56789\",\"user_id_str\":\"67890\"}}}\n";
-    private String follow = "{\"follower\":{\"id\":12345},\"followee\":{\"id\":56789}}\n";
-    private String user = "{\"location\":\"\",\"default_profile\":true,\"profile_background_tile\":false,\"statuses_count\":1,\"lang\":\"en\",\"profile_link_color\":\"0084B4\",\"id\":67890,\"following\":false,\"protected\":false,\"favourites_count\":0,\"profile_text_color\":\"333333\",\"description\":\"\",\"verified\":false,\"contributors_enabled\":false,\"profile_sidebar_border_color\":\"C0DEED\",\"name\":\"name\",\"profile_background_color\":\"C0DEED\",\"created_at\":\"Fri Apr 17 12:35:56 +0000 2009\",\"is_translation_enabled\":false,\"default_profile_image\":true,\"followers_count\":2,\"profile_image_url_https\":\"https://profile_image_url_https.png\",\"geo_enabled\":false,\"status\":{\"contributors\":null,\"text\":\"Working\",\"geo\":null,\"retweeted\":false,\"in_reply_to_screen_name\":null,\"truncated\":false,\"lang\":\"en\",\"entities\":{\"symbols\":[],\"urls\":[],\"hashtags\":[],\"user_mentions\":[]},\"in_reply_to_status_id_str\":null,\"id\":67890,\"source\":\"web\",\"in_repl
 y_to_user_id_str\":null,\"favorited\":false,\"in_reply_to_status_id\":null,\"retweet_count\":0,\"created_at\":\"Fri Apr 17 12:37:54 +0000 2009\",\"in_reply_to_user_id\":null,\"favorite_count\":0,\"id_str\":\"67890\",\"place\":null,\"coordinates\":null},\"profile_background_image_url\":\"http://abs.twimg.com/profile_background_image_url.png\",\"profile_background_image_url_https\":\"https://abs.twimg.com/images/profile_background_image_url_https.png\",\"follow_request_sent\":false,\"entities\":{\"description\":{\"urls\":[]}},\"url\":null,\"utc_offset\":null,\"time_zone\":null,\"notifications\":false,\"profile_use_background_image\":true,\"friends_count\":1,\"profile_sidebar_fill_color\":\"DDEEF6\",\"screen_name\":\"screen_name\",\"id_str\":\"67890\",\"profile_image_url\":\"http://abs.twimg.com/sticky/default_profile_images/default_profile_1_normal.png\",\"listed_count\":0,\"is_translator\":false}";
+  private String tweet = "{\"created_at\":\"Wed Dec 11 22:27:34 +0000 2013\",\"id\":12345,\"id_str\":\"12345\",\"text\":\"text\",\"source\":\"source\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":91407775,\"id_str\":\"12345\",\"name\":\"name\",\"screen_name\":\"screen_name\",\"location\":\"\",\"url\":null,\"description\":null,\"protected\":false,\"followers_count\":136,\"friends_count\":0,\"listed_count\":1,\"created_at\":\"Fri Nov 20 19:29:02 +0000 2009\",\"favourites_count\":0,\"utc_offset\":null,\"time_zone\":null,\"geo_enabled\":false,\"verified\":false,\"statuses_count\":1793,\"lang\":\"en\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"C0DEED\",\"profile_background_image_url\":\"http:\\/\\/profile_background_image_url.png\",\"profile_background_image_url_https\":\"https:\\/\\/profile_bac
 kground_image_url_https.png\",\"profile_background_tile\":false,\"profile_image_url\":\"http:\\/\\/profile_image_url.jpg\",\"profile_image_url_https\":\"https:\\/\\/profile_image_url_https.jpg\",\"profile_link_color\":\"0084B4\",\"profile_sidebar_border_color\":\"C0DEED\",\"profile_sidebar_fill_color\":\"DDEEF6\",\"profile_text_color\":\"333333\",\"profile_use_background_image\":true,\"default_profile\":true,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweet_count\":0,\"favorite_count\":0,\"entities\":{\"hashtags\":[],\"symbols\":[],\"urls\":[{\"url\":\"http:\\/\\/url\",\"expanded_url\":\"http:\\/\\/expanded_url\",\"display_url\":\"display_url\",\"indices\":[118,140]}],\"user_mentions\":[]},\"favorited\":false,\"retweeted\":false,\"possibly_sensitive\":false,\"filter_level\":\"medium\",\"lang\":\"en\"}\n";
+  private String retweet = "{\"created_at\":\"Wed Dec 11 22:27:34 +0000 2013\",\"id\":23456,\"id_str\":\"23456\",\"text\":\"text\",\"source\":\"web\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":163149656,\"id_str\":\"34567\",\"name\":\"name\",\"screen_name\":\"screen_name\",\"location\":\"location\",\"url\":\"http:\\/\\/www.youtube.com\\/watch?v=url\",\"description\":\"description\\u00ed\",\"protected\":false,\"followers_count\":41,\"friends_count\":75,\"listed_count\":2,\"created_at\":\"Mon Jul 05 17:35:49 +0000 2010\",\"favourites_count\":4697,\"utc_offset\":-10800,\"time_zone\":\"Buenos Aires\",\"geo_enabled\":false,\"verified\":false,\"statuses_count\":5257,\"lang\":\"es\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"C4A64B\",\"profile_background_image_url\":\"http:\\/\\/a0.twimg.com\\/p
 rofile_background_images\\/12345\\/12345.jpeg\",\"profile_background_image_url_https\":\"https:\\/\\/si0.twimg.com\\/profile_background_images\\/12345\\/12345.jpeg\",\"profile_background_tile\":true,\"profile_image_url\":\"http:\\/\\/pbs.twimg.com\\/profile_images\\/12345\\/12345.jpeg\",\"profile_image_url_https\":\"https:\\/\\/pbs.twimg.com\\/profile_images\\/12345\\/12345.jpeg\",\"profile_banner_url\":\"https:\\/\\/pbs.twimg.com\\/profile_banners\\/12345\\/12345\",\"profile_link_color\":\"BF415A\",\"profile_sidebar_border_color\":\"000000\",\"profile_sidebar_fill_color\":\"B17CED\",\"profile_text_color\":\"3D1957\",\"profile_use_background_image\":true,\"default_profile\":false,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweeted_status\":{\"created_at\":\"Wed Dec 11 22:25:06 +0000 2013\",\"id\":34567,\"id_str\":\"34567\",\"text\":\"text\",\"source\
 ":\"source\",\"truncated\":false,\"in_reply_to_status_id\":null,\"in_reply_to_status_id_str\":null,\"in_reply_to_user_id\":null,\"in_reply_to_user_id_str\":null,\"in_reply_to_screen_name\":null,\"user\":{\"id\":34567,\"id_str\":\"34567\",\"name\":\"name\",\"screen_name\":\"screen_name\",\"location\":\"\",\"url\":\"http:\\/\\/www.web.com\",\"description\":\"description\",\"protected\":false,\"followers_count\":34307,\"friends_count\":325,\"listed_count\":361,\"created_at\":\"Fri Apr 13 19:00:11 +0000 2012\",\"favourites_count\":44956,\"utc_offset\":3600,\"time_zone\":\"Madrid\",\"geo_enabled\":false,\"verified\":false,\"statuses_count\":24011,\"lang\":\"es\",\"contributors_enabled\":false,\"is_translator\":false,\"profile_background_color\":\"000000\",\"profile_background_image_url\":\"http:\\/\\/profile_background_image_url.jpeg\",\"profile_background_image_url_https\":\"https:\\/\\/si0.twimg.com\\/profile_background_images\\/34567\\/34567.jpeg\",\"profile_background_tile\":false,\"
 profile_image_url\":\"http:\\/\\/pbs.twimg.com\\/profile_images\\/34567\\/34567.gif\",\"profile_image_url_https\":\"https:\\/\\/pbs.twimg.com\\/profile_images\\/34567\\/34567.gif\",\"profile_banner_url\":\"https:\\/\\/pbs.twimg.com\\/profile_banners\\/34567\\/34567\",\"profile_link_color\":\"FF00E1\",\"profile_sidebar_border_color\":\"FFFFFF\",\"profile_sidebar_fill_color\":\"F3F3F3\",\"profile_text_color\":\"333333\",\"profile_use_background_image\":true,\"default_profile\":false,\"default_profile_image\":false,\"following\":null,\"follow_request_sent\":null,\"notifications\":null},\"geo\":null,\"coordinates\":null,\"place\":null,\"contributors\":null,\"retweet_count\":9,\"favorite_count\":6,\"entities\":{\"hashtags\":[],\"symbols\":[],\"urls\":[],\"user_mentions\":[]},\"favorited\":false,\"retweeted\":false,\"lang\":\"es\"},\"retweet_count\":0,\"favorite_count\":0,\"entities\":{\"hashtags\":[],\"symbols\":[],\"urls\":[],\"user_mentions\":[{\"screen_name\":\"screen_name\",\"name\":
 \"name emocional\",\"id\":45678,\"id_str\":\"45678\",\"indices\":[3,14]}]},\"favorited\":false,\"retweeted\":false,\"filter_level\":\"medium\",\"lang\":\"es\"}\n";
+  private String delete = "{\"delete\":{\"status\":{\"id\":56789,\"user_id\":67890,\"id_str\":\"56789\",\"user_id_str\":\"67890\"}}}\n";
+  private String follow = "{\"follower\":{\"id\":12345},\"followee\":{\"id\":56789}}\n";
+  private String user = "{\"location\":\"\",\"default_profile\":true,\"profile_background_tile\":false,\"statuses_count\":1,\"lang\":\"en\",\"profile_link_color\":\"0084B4\",\"id\":67890,\"following\":false,\"protected\":false,\"favourites_count\":0,\"profile_text_color\":\"333333\",\"description\":\"\",\"verified\":false,\"contributors_enabled\":false,\"profile_sidebar_border_color\":\"C0DEED\",\"name\":\"name\",\"profile_background_color\":\"C0DEED\",\"created_at\":\"Fri Apr 17 12:35:56 +0000 2009\",\"is_translation_enabled\":false,\"default_profile_image\":true,\"followers_count\":2,\"profile_image_url_https\":\"https://profile_image_url_https.png\",\"geo_enabled\":false,\"status\":{\"contributors\":null,\"text\":\"Working\",\"geo\":null,\"retweeted\":false,\"in_reply_to_screen_name\":null,\"truncated\":false,\"lang\":\"en\",\"entities\":{\"symbols\":[],\"urls\":[],\"hashtags\":[],\"user_mentions\":[]},\"in_reply_to_status_id_str\":null,\"id\":67890,\"source\":\"web\",\"in_reply_
 to_user_id_str\":null,\"favorited\":false,\"in_reply_to_status_id\":null,\"retweet_count\":0,\"created_at\":\"Fri Apr 17 12:37:54 +0000 2009\",\"in_reply_to_user_id\":null,\"favorite_count\":0,\"id_str\":\"67890\",\"place\":null,\"coordinates\":null},\"profile_background_image_url\":\"http://abs.twimg.com/profile_background_image_url.png\",\"profile_background_image_url_https\":\"https://abs.twimg.com/images/profile_background_image_url_https.png\",\"follow_request_sent\":false,\"entities\":{\"description\":{\"urls\":[]}},\"url\":null,\"utc_offset\":null,\"time_zone\":null,\"notifications\":false,\"profile_use_background_image\":true,\"friends_count\":1,\"profile_sidebar_fill_color\":\"DDEEF6\",\"screen_name\":\"screen_name\",\"id_str\":\"67890\",\"profile_image_url\":\"http://abs.twimg.com/sticky/default_profile_images/default_profile_1_normal.png\",\"listed_count\":0,\"is_translator\":false}";
 
-    @Test
-    public void testDetectTweet() {
-        List<Class> detected = new TwitterDocumentClassifier().detectClasses(tweet);
-        Assert.assertTrue(detected.size() == 1);
-        Class result = detected.get(0);
-        if( !result.equals(Tweet.class) )
-            Assert.fail();
+  @Test
+  public void testDetectTweet() {
+    List<Class> detected = new TwitterDocumentClassifier().detectClasses(tweet);
+    Assert.assertTrue(detected.size() == 1);
+    Class result = detected.get(0);
+    if ( !result.equals(Tweet.class) ) {
+      Assert.fail();
     }
+  }
 
-    @Test
-    public void testDetectRetweet() {
-        List<Class> detected = new TwitterDocumentClassifier().detectClasses(retweet);
-        Assert.assertTrue(detected.size() == 1);
-        Class result = detected.get(0);
-        if( !result.equals(Retweet.class) )
-            Assert.fail();
+  @Test
+  public void testDetectRetweet() {
+    List<Class> detected = new TwitterDocumentClassifier().detectClasses(retweet);
+    Assert.assertTrue(detected.size() == 1);
+    Class result = detected.get(0);
+    if ( !result.equals(Retweet.class) ) {
+      Assert.fail();
     }
+  }
 
-    @Test
-    public void testDetectDelete() {
-        List<Class> detected = new TwitterDocumentClassifier().detectClasses(delete);
-        Assert.assertTrue(detected.size() == 1);
-        Class result = detected.get(0);
-        if( !result.equals(Delete.class) )
-            Assert.fail();
+  @Test
+  public void testDetectDelete() {
+    List<Class> detected = new TwitterDocumentClassifier().detectClasses(delete);
+    Assert.assertTrue(detected.size() == 1);
+    Class result = detected.get(0);
+    if ( !result.equals(Delete.class) ) {
+      Assert.fail();
     }
+  }
 
-    @Test
-    public void testDetectFollow() {
-        List<Class> detected = new TwitterDocumentClassifier().detectClasses(follow);
-        Assert.assertTrue(detected.size() == 1);
-        Class result = detected.get(0);
-        if( !result.equals(Follow.class) )
-            Assert.fail();
+  @Test
+  public void testDetectFollow() {
+    List<Class> detected = new TwitterDocumentClassifier().detectClasses(follow);
+    Assert.assertTrue(detected.size() == 1);
+    Class result = detected.get(0);
+    if ( !result.equals(Follow.class) ) {
+      Assert.fail();
     }
+  }
 
-    @Test
-    public void testDetectUser() {
-        List<Class> detected = new TwitterDocumentClassifier().detectClasses(user);
-        Assert.assertTrue(detected.size() == 1);
-        Class result = detected.get(0);
-        if (!result.equals(User.class))
-            Assert.fail();
+  @Test
+  public void testDetectUser() {
+    List<Class> detected = new TwitterDocumentClassifier().detectClasses(user);
+    Assert.assertTrue(detected.size() == 1);
+    Class result = detected.get(0);
+    if (!result.equals(User.class)) {
+      Assert.fail();
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/processor/YoutubeTypeConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/processor/YoutubeTypeConverter.java b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/processor/YoutubeTypeConverter.java
index 5e24882..6e269e5 100644
--- a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/processor/YoutubeTypeConverter.java
+++ b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/processor/YoutubeTypeConverter.java
@@ -18,6 +18,11 @@
 
 package com.youtube.processor;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProcessor;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.pojo.json.Activity;
+
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import com.google.api.services.youtube.model.Channel;
 import com.google.api.services.youtube.model.Video;
@@ -27,10 +32,6 @@ import com.youtube.serializer.YoutubeChannelDeserializer;
 import com.youtube.serializer.YoutubeEventClassifier;
 import com.youtube.serializer.YoutubeVideoDeserializer;
 import org.apache.commons.lang.NotImplementedException;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -39,89 +40,90 @@ import java.util.Queue;
 
 public class YoutubeTypeConverter implements StreamsProcessor {
 
-    public final static String STREAMS_ID = "YoutubeTypeConverter";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(YoutubeTypeConverter.class);
-
-    private StreamsJacksonMapper mapper;
-    private Queue<Video> inQueue;
-    private Queue<StreamsDatum> outQueue;
-    private YoutubeActivityUtil youtubeActivityUtil;
-    private int count = 0;
-
-    public YoutubeTypeConverter() {}
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
+  public static final String STREAMS_ID = "YoutubeTypeConverter";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeTypeConverter.class);
+
+  private StreamsJacksonMapper mapper;
+  private Queue<Video> inQueue;
+  private Queue<StreamsDatum> outQueue;
+  private YoutubeActivityUtil youtubeActivityUtil;
+  private int count = 0;
+
+  public YoutubeTypeConverter() {}
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public List<StreamsDatum> process(StreamsDatum streamsDatum) {
+    StreamsDatum result = null;
+
+    try {
+      Object item = streamsDatum.getDocument();
+
+      LOGGER.debug("{} processing {}", STREAMS_ID, item.getClass());
+      Activity activity = null;
+
+      if (item instanceof String) {
+        item = deserializeItem(item);
+      }
+
+      if (item instanceof Video) {
+        activity = new Activity();
+        youtubeActivityUtil.updateActivity((Video)item, activity, streamsDatum.getId());
+      } else if (item instanceof Channel) {
+        activity = new Activity();
+        this.youtubeActivityUtil.updateActivity((Channel)item, activity, null);
+      } else {
+        throw new NotImplementedException("Type conversion not implement for type : " + item.getClass().getName());
+      }
+
+      if (activity != null) {
+        result = new StreamsDatum(activity);
+        count++;
+      }
+    } catch (Exception ex) {
+      LOGGER.error("Exception while converting Video to Activity: {}", ex);
     }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum streamsDatum) {
-        StreamsDatum result = null;
-
-        try {
-            Object item = streamsDatum.getDocument();
-
-            LOGGER.debug("{} processing {}", STREAMS_ID, item.getClass());
-            Activity activity = null;
-
-            if(item instanceof String) {
-                item = deserializeItem(item);
-            }
-
-            if(item instanceof Video) {
-                activity = new Activity();
-                youtubeActivityUtil.updateActivity((Video)item, activity, streamsDatum.getId());
-            } else if(item instanceof Channel) {
-                activity = new Activity();
-                this.youtubeActivityUtil.updateActivity((Channel)item, activity, null);
-            } else {
-                throw new NotImplementedException("Type conversion not implement for type : "+item.getClass().getName());
-            }
-
-            if(activity != null) {
-                result = new StreamsDatum(activity);
-                count++;
-            }
-        } catch (Exception e) {
-            LOGGER.error("Exception while converting Video to Activity: {}", e);
-        }
-
-        if( result != null )
-            return Lists.newArrayList(result);
-        else
-            return Lists.newArrayList();
+    if ( result != null ) {
+      return Lists.newArrayList(result);
+    } else {
+      return Lists.newArrayList();
     }
-
-    private Object deserializeItem(Object item) {
-        try {
-            Class klass = YoutubeEventClassifier.detectClass((String) item);
-            if (klass.equals(Video.class)) {
-                item = mapper.readValue((String) item, Video.class);
-            } else if(klass.equals(Channel.class)) {
-                item = mapper.readValue((String) item, Channel.class);
-            }
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to deserializeItem: {}", e);
-        }
-
-        return item;
+  }
+
+  private Object deserializeItem(Object item) {
+    try {
+      Class klass = YoutubeEventClassifier.detectClass((String) item);
+      if (klass.equals(Video.class)) {
+        item = mapper.readValue((String) item, Video.class);
+      } else if (klass.equals(Channel.class)) {
+        item = mapper.readValue((String) item, Channel.class);
+      }
+    } catch (Exception ex) {
+      LOGGER.error("Exception while trying to deserializeItem: {}", ex);
     }
 
-    @Override
-    public void prepare(Object o) {
-        youtubeActivityUtil = new YoutubeActivityUtil();
-        mapper = StreamsJacksonMapper.getInstance();
-
-        SimpleModule simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(Video.class, new YoutubeVideoDeserializer());
-        mapper.registerModule(simpleModule);
-        simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(Channel.class, new YoutubeChannelDeserializer());
-        mapper.registerModule(simpleModule);
-    }
+    return item;
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    youtubeActivityUtil = new YoutubeActivityUtil();
+    mapper = StreamsJacksonMapper.getInstance();
+
+    SimpleModule simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(Video.class, new YoutubeVideoDeserializer());
+    mapper.registerModule(simpleModule);
+    simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(Channel.class, new YoutubeChannelDeserializer());
+    mapper.registerModule(simpleModule);
+  }
 
-    @Override
-    public void cleanUp() {}
+  @Override
+  public void cleanUp() {}
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeChannelDataCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeChannelDataCollector.java b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeChannelDataCollector.java
index 8e980a7..fd238db 100644
--- a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeChannelDataCollector.java
+++ b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeChannelDataCollector.java
@@ -19,18 +19,17 @@
 
 package com.youtube.provider;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.google.gplus.configuration.UserInfo;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.api.client.googleapis.json.GoogleJsonResponseException;
-import com.google.api.client.http.HttpRequest;
 import com.google.api.services.youtube.YouTube;
 import com.google.api.services.youtube.model.Channel;
-import com.google.api.services.youtube.model.ChannelListResponse;
 import com.google.gson.Gson;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.google.gplus.configuration.UserInfo;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
 import org.apache.youtube.pojo.YoutubeConfiguration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -39,63 +38,77 @@ import java.util.List;
 import java.util.concurrent.BlockingQueue;
 
 /**
- *
+ * Collects YoutubeChannelData on behalf of YoutubeChannelProvider.
  */
-public class YoutubeChannelDataCollector extends YoutubeDataCollector{
+public class YoutubeChannelDataCollector extends YoutubeDataCollector {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeChannelDataCollector.class);
-    private static final String CONTENT = "snippet,contentDetails,statistics,topicDetails";
-    private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
-    private static final int MAX_ATTEMPTS= 5;
+  private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeChannelDataCollector.class);
+  private static final String CONTENT = "snippet,contentDetails,statistics,topicDetails";
+  private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+  private static final int MAX_ATTEMPTS = 5;
 
-    private YouTube youTube;
-    private BlockingQueue<StreamsDatum> queue;
-    private BackOffStrategy strategy;
-    private UserInfo userInfo;
-    private YoutubeConfiguration youtubeConfig;
+  private YouTube youTube;
+  private BlockingQueue<StreamsDatum> queue;
+  private BackOffStrategy strategy;
+  private UserInfo userInfo;
+  private YoutubeConfiguration youtubeConfig;
 
-    public YoutubeChannelDataCollector(YouTube youTube, BlockingQueue<StreamsDatum> queue, BackOffStrategy strategy, UserInfo userInfo, YoutubeConfiguration youtubeConfig) {
-        this.youTube = youTube;
-        this.queue = queue;
-        this.strategy = strategy;
-        this.userInfo = userInfo;
-        this.youtubeConfig = youtubeConfig;
-    }
+  /**
+   * YoutubeChannelDataCollector constructor.
+   * @param youTube YouTube
+   * @param queue BlockingQueue of StreamsDatum
+   * @param strategy BackOffStrategy
+   * @param userInfo UserInfo
+   * @param youtubeConfig YoutubeConfiguration
+   */
+  public YoutubeChannelDataCollector(
+      YouTube youTube,
+      BlockingQueue<StreamsDatum> queue,
+      BackOffStrategy strategy,
+      UserInfo userInfo,
+      YoutubeConfiguration youtubeConfig) {
+    this.youTube = youTube;
+    this.queue = queue;
+    this.strategy = strategy;
+    this.userInfo = userInfo;
+    this.youtubeConfig = youtubeConfig;
+  }
 
-    @Override
-    public void run() {
-        Gson gson = new Gson();
+  @Override
+  public void run() {
+    Gson gson = new Gson();
+    try {
+      int attempt = 0;
+      YouTube.Channels.List channelLists = this.youTube.channels().list(CONTENT).setId(this.userInfo.getUserId()).setKey(this.youtubeConfig.getApiKey());
+      boolean tryAgain = false;
+      do {
         try {
-            int attempt = 0;
-             YouTube.Channels.List channelLists = this.youTube.channels().list(CONTENT).setId(this.userInfo.getUserId()).setKey(this.youtubeConfig.getApiKey());
-            boolean tryAgain = false;
-            do {
-                try {
-                    List<Channel> channels = channelLists.execute().getItems();
-                    for (Channel channel : channels) {
-                        String json = gson.toJson(channel);
-                        this.queue.put(new StreamsDatum(json, channel.getId()));
-                    }
-                    if (StringUtils.isEmpty(channelLists.getPageToken())) {
-                        channelLists = null;
-                    } else {
-                        channelLists = this.youTube.channels().list(CONTENT).setId(this.userInfo.getUserId()).setOauthToken(this.youtubeConfig.getApiKey())
-                                .setPageToken(channelLists.getPageToken());
-                    }
-                } catch (GoogleJsonResponseException gjre) {
-                    LOGGER.warn("GoogleJsonResposneException caught : {}", gjre);
-                    tryAgain = backoffAndIdentifyIfRetry(gjre, this.strategy);
-                    ++attempt;
-                } catch (Throwable t) {
-                    LOGGER.warn("Unable to get channel info for id : {}", this.userInfo.getUserId());
-                    LOGGER.warn("Excpection thrown while trying to get channel info : {}", t);
-                }
-            } while((tryAgain && attempt < MAX_ATTEMPTS) || channelLists != null);
-
-        } catch (Throwable t) {
-            LOGGER.warn(t.getMessage());
+          List<Channel> channels = channelLists.execute().getItems();
+          for (Channel channel : channels) {
+            String json = gson.toJson(channel);
+            this.queue.put(new StreamsDatum(json, channel.getId()));
+          }
+          if (StringUtils.isEmpty(channelLists.getPageToken())) {
+            channelLists = null;
+          } else {
+            channelLists = this.youTube.channels().list(CONTENT).setId(this.userInfo.getUserId()).setOauthToken(this.youtubeConfig.getApiKey())
+                .setPageToken(channelLists.getPageToken());
+          }
+        } catch (GoogleJsonResponseException gjre) {
+          LOGGER.warn("GoogleJsonResposneException caught : {}", gjre);
+          tryAgain = backoffAndIdentifyIfRetry(gjre, this.strategy);
+          ++attempt;
+        } catch (Throwable throwable) {
+          LOGGER.warn("Unable to get channel info for id : {}", this.userInfo.getUserId());
+          LOGGER.warn("Excpection thrown while trying to get channel info : {}", throwable);
         }
+      }
+      while ((tryAgain && attempt < MAX_ATTEMPTS) || channelLists != null);
+
+    } catch (Throwable throwable) {
+      LOGGER.warn(throwable.getMessage());
     }
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeChannelProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeChannelProvider.java b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeChannelProvider.java
index 817c98e..d9b1e14 100644
--- a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeChannelProvider.java
+++ b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeChannelProvider.java
@@ -19,22 +19,22 @@
 
 package com.youtube.provider;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfiguration;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.google.gplus.configuration.UserInfo;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.api.services.youtube.YouTube;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import com.google.common.util.concurrent.Uninterruptibles;
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 import com.typesafe.config.ConfigParseOptions;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.google.gplus.configuration.UserInfo;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
 import org.apache.youtube.pojo.YoutubeConfiguration;
 
 import java.io.BufferedOutputStream;
@@ -47,76 +47,86 @@ import java.util.concurrent.TimeUnit;
 
 /**
  *  Retrieve recent activity from a list of channels.
- *
- *  To use from command line:
- *
- *  Supply (at least) the following required configuration in application.conf:
- *
- *  youtube.oauth.pathToP12KeyFile
- *  youtube.oauth.serviceAccountEmailAddress
- *  youtube.apiKey
- *  youtube.youtubeUsers
- *
- *  Launch using:
- *
- *  mvn exec:java -Dexec.mainClass=org.apache.streams.youtube.provider.YoutubeUserActivityProvider -Dexec.args="application.conf tweets.json"
  */
 public class YoutubeChannelProvider extends YoutubeProvider {
 
-    public YoutubeChannelProvider() {
-        super();
-    }
+  public YoutubeChannelProvider() {
+    super();
+  }
 
-    public YoutubeChannelProvider(YoutubeConfiguration config) {
-        super(config);
-    }
+  public YoutubeChannelProvider(YoutubeConfiguration config) {
+    super(config);
+  }
 
-    @Override
-    protected Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, YouTube youtube, UserInfo userInfo) {
-        return new YoutubeChannelDataCollector(youtube, queue, strategy, userInfo, this.config);
-    }
+  @Override
+  protected Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, YouTube youtube, UserInfo userInfo) {
+    return new YoutubeChannelDataCollector(youtube, queue, strategy, userInfo, this.config);
+  }
+
+  /**
+   * To use from command line:
+   *
+   * <p>
+   * Supply (at least) the following required configuration in application.conf:
+   *
+   * <p>
+   * youtube.oauth.pathToP12KeyFile
+   * youtube.oauth.serviceAccountEmailAddress
+   * youtube.apiKey
+   * youtube.youtubeUsers
+   *
+   * <p>
+   * Launch using:
+   *
+   * <p>
+   * mvn exec:java -Dexec.mainClass=org.apache.streams.youtube.provider.YoutubeUserActivityProvider -Dexec.args="application.conf tweets.json"
+   *
+   * @param args args
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    YoutubeConfiguration config = new ComponentConfigurator<>(YoutubeConfiguration.class).detectConfiguration(typesafe, "youtube");
+    YoutubeChannelProvider provider = new YoutubeChannelProvider(config);
+
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    public static void main(String[] args) throws Exception {
-
-        Preconditions.checkArgument(args.length >= 2);
-
-        String configfile = args[0];
-        String outfile = args[1];
-
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config testResourceConfig = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
-
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        YoutubeConfiguration config = new ComponentConfigurator<>(YoutubeConfiguration.class).detectConfiguration(typesafe, "youtube");
-        YoutubeChannelProvider provider = new YoutubeChannelProvider(config);
-
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
-            while(iterator.hasNext()) {
-                StreamsDatum datum = iterator.next();
-                String json;
-                try {
-                    if( datum.getDocument() instanceof String )
-                        json = (String)datum.getDocument();
-                    else
-                        json = mapper.writeValueAsString(datum.getDocument());
-                    outStream.println(json);
-                } catch (JsonProcessingException e) {
-                    System.err.println(e.getMessage());
-                }
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
+      while (iterator.hasNext()) {
+        StreamsDatum datum = iterator.next();
+        String json;
+        try {
+          if ( datum.getDocument() instanceof String ) {
+            json = (String) datum.getDocument();
+          } else {
+            json = mapper.writeValueAsString(datum.getDocument());
+          }
+          outStream.println(json);
+        } catch (JsonProcessingException ex) {
+          System.err.println(ex.getMessage());
+        }
+      }
     }
+    while ( provider.isRunning());
+    provider.cleanUp();
+    outStream.flush();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeDataCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeDataCollector.java b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeDataCollector.java
index 3a17134..3eede18 100644
--- a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeDataCollector.java
+++ b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeDataCollector.java
@@ -19,47 +19,52 @@
 
 package com.youtube.provider;
 
-import com.google.api.client.googleapis.json.GoogleJsonResponseException;
 import org.apache.streams.util.api.requests.backoff.BackOffException;
 import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+
+import com.google.api.client.googleapis.json.GoogleJsonResponseException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+/**
+ * Base Collector for Youtube Data.
+ */
 public abstract class YoutubeDataCollector implements Runnable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeDataCollector.class);
-
-    /**
-     * Looks at the status code of the expception.  If the code indicates that the request should be retried,
-     * it executes the back off strategy and returns true.
-     * @param gjre
-     * @param backOff
-     * @return returns true if the error code of the exception indicates the request should be retried.
-     */
-    public boolean backoffAndIdentifyIfRetry(GoogleJsonResponseException gjre, BackOffStrategy backOff) throws BackOffException {
-        boolean tryAgain = false;
+  private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeDataCollector.class);
 
-        switch (gjre.getStatusCode()) {
-            case 400 :
-                LOGGER.warn("Bad Request  : {}",  gjre);
-                break;
-            case 401 :
-                LOGGER.warn("Invalid Credentials : {}", gjre);
-            case 403 :
-                LOGGER.warn("Possible rate limit exception. Retrying. : {}", gjre.getMessage());
-                backOff.backOff();
-                tryAgain = true;
-                break;
-            case 503 :
-                LOGGER.warn("Google Backend Service Error : {}", gjre);
-                break;
-            default:
-                LOGGER.warn("Google Service returned error : {}", gjre);
-                tryAgain = true;
-                backOff.backOff();
-                break;
-        }
+  /**
+   * Looks at the status code of the exception. If the code indicates that the request should be retried,
+   * it executes the back off strategy and returns true.
+   * @param gjre
+   * @param backOff
+   * @return returns true if the error code of the exception indicates the request should be retried.
+   */
+  public boolean backoffAndIdentifyIfRetry(GoogleJsonResponseException gjre, BackOffStrategy backOff) throws BackOffException {
+    boolean tryAgain = false;
 
-        return tryAgain;
+    switch (gjre.getStatusCode()) {
+      case 400 :
+        LOGGER.warn("Bad Request  : {}",  gjre);
+        break;
+      case 401 :
+        LOGGER.warn("Invalid Credentials : {}", gjre);
+        break;
+      case 403 :
+        LOGGER.warn("Possible rate limit exception. Retrying. : {}", gjre.getMessage());
+        backOff.backOff();
+        tryAgain = true;
+        break;
+      case 503 :
+        LOGGER.warn("Google Backend Service Error : {}", gjre);
+        break;
+      default:
+        LOGGER.warn("Google Service returned error : {}", gjre);
+        tryAgain = true;
+        backOff.backOff();
+        break;
     }
+
+    return tryAgain;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeProvider.java b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeProvider.java
index ab77467..1442f8b 100644
--- a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeProvider.java
+++ b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeProvider.java
@@ -19,6 +19,16 @@
 
 package com.youtube.provider;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProvider;
+import org.apache.streams.core.StreamsResultSet;
+import org.apache.streams.google.gplus.configuration.UserInfo;
+import org.apache.streams.util.ComponentUtils;
+import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+import org.apache.streams.util.api.requests.backoff.impl.ExponentialBackOffStrategy;
+
 import com.google.api.client.auth.oauth2.Credential;
 import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
 import com.google.api.client.http.HttpTransport;
@@ -34,15 +44,6 @@ import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProvider;
-import org.apache.streams.core.StreamsResultSet;
-import org.apache.streams.google.gplus.configuration.UserInfo;
-import org.apache.streams.util.ComponentUtils;
-import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
-import org.apache.streams.util.api.requests.backoff.impl.ExponentialBackOffStrategy;
 import org.apache.youtube.pojo.YoutubeConfiguration;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
@@ -64,205 +65,213 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 public abstract class YoutubeProvider implements StreamsProvider {
 
-    public static final String STREAMS_ID = "YoutubeProvider";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(YoutubeProvider.class);
-    private final static int MAX_BATCH_SIZE = 1000;
-
-    // This OAuth 2.0 access scope allows for full read/write access to the
-    // authenticated user's account.
-    private List<String> scopes = Lists.newArrayList("https://www.googleapis.com/auth/youtube");
-
-    /**
-     * Define a global instance of the HTTP transport.
-     */
-    public static final HttpTransport HTTP_TRANSPORT = new NetHttpTransport();
-
-    /**
-     * Define a global instance of the JSON factory.
-     */
-    public static final JsonFactory JSON_FACTORY = new JacksonFactory();
-
-    private static final int DEFAULT_THREAD_POOL_SIZE = 5;
-
-    private List<ListenableFuture<Object>> futures = new ArrayList<>();
-
-    private ListeningExecutorService executor;
-    private BlockingQueue<StreamsDatum> datumQueue;
-    private AtomicBoolean isComplete;
-    private boolean previousPullWasEmpty;
-
-    protected YouTube youtube;
-    protected YoutubeConfiguration config;
-
-    public YoutubeProvider() {
-        this.config = new ComponentConfigurator<>(YoutubeConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("youtube"));
-
-        Preconditions.checkNotNull(this.config.getApiKey());
-    }
-
-    public YoutubeProvider(YoutubeConfiguration config) {
-        this.config = config;
-
-        Preconditions.checkNotNull(this.config.getApiKey());
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
+  public static final String STREAMS_ID = "YoutubeProvider";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeProvider.class);
+  private static final int MAX_BATCH_SIZE = 1000;
+
+  // This OAuth 2.0 access scope allows for full read/write access to the
+  // authenticated user's account.
+  private List<String> scopes = Lists.newArrayList("https://www.googleapis.com/auth/youtube");
+
+  /**
+   * Define a global instance of the HTTP transport.
+   */
+  public static final HttpTransport HTTP_TRANSPORT = new NetHttpTransport();
+
+  /**
+   * Define a global instance of the JSON factory.
+   */
+  public static final JsonFactory JSON_FACTORY = new JacksonFactory();
+
+  private static final int DEFAULT_THREAD_POOL_SIZE = 5;
+
+  private List<ListenableFuture<Object>> futures = new ArrayList<>();
+
+  private ListeningExecutorService executor;
+  private BlockingQueue<StreamsDatum> datumQueue;
+  private AtomicBoolean isComplete;
+  private boolean previousPullWasEmpty;
+
+  protected YouTube youtube;
+  protected YoutubeConfiguration config;
+
+  /**
+   * YoutubeProvider constructor.
+   * Resolves config from JVM 'youtube'.
+   */
+  public YoutubeProvider() {
+    this.config = new ComponentConfigurator<>(YoutubeConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("youtube"));
+
+    Preconditions.checkNotNull(this.config.getApiKey());
+  }
+
+  /**
+   * YoutubeProvider constructor - uses supplied YoutubeConfiguration.
+   * @param config YoutubeConfiguration
+   */
+  public YoutubeProvider(YoutubeConfiguration config) {
+    this.config = config;
+
+    Preconditions.checkNotNull(this.config.getApiKey());
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    try {
+      this.youtube = createYouTubeClient();
+    } catch (IOException | GeneralSecurityException ex) {
+      LOGGER.error("Failed to created oauth for YouTube : {}", ex);
+      throw new RuntimeException(ex);
     }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        try {
-            this.youtube = createYouTubeClient();
-        } catch (IOException |GeneralSecurityException e) {
-            LOGGER.error("Failed to created oauth for YouTube : {}", e);
-            throw new RuntimeException(e);
-        }
-
-        this.executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(DEFAULT_THREAD_POOL_SIZE));
-        this.datumQueue = new LinkedBlockingQueue<>(1000);
-        this.isComplete = new AtomicBoolean(false);
-        this.previousPullWasEmpty = false;
+    this.executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(DEFAULT_THREAD_POOL_SIZE));
+    this.datumQueue = new LinkedBlockingQueue<>(1000);
+    this.isComplete = new AtomicBoolean(false);
+    this.previousPullWasEmpty = false;
+  }
+
+  @Override
+  public void startStream() {
+    BackOffStrategy backOffStrategy = new ExponentialBackOffStrategy(2);
+
+    for (UserInfo user : this.config.getYoutubeUsers()) {
+      if (this.config.getDefaultAfterDate() != null && user.getAfterDate() == null) {
+        user.setAfterDate(this.config.getDefaultAfterDate());
+      }
+      if (this.config.getDefaultBeforeDate() != null && user.getBeforeDate() == null) {
+        user.setBeforeDate(this.config.getDefaultBeforeDate());
+      }
+
+      ListenableFuture future = executor.submit(getDataCollector(backOffStrategy, this.datumQueue, this.youtube, user));
+      futures.add(future);
     }
 
-    @Override
-    public void startStream() {
-        BackOffStrategy backOffStrategy = new ExponentialBackOffStrategy(2);
-
-        for(UserInfo user : this.config.getYoutubeUsers()) {
-            if(this.config.getDefaultAfterDate() != null && user.getAfterDate() == null) {
-                user.setAfterDate(this.config.getDefaultAfterDate());
-            }
-            if(this.config.getDefaultBeforeDate() != null && user.getBeforeDate() == null) {
-                user.setBeforeDate(this.config.getDefaultBeforeDate());
-            }
-
-            ListenableFuture future = executor.submit(getDataCollector(backOffStrategy, this.datumQueue, this.youtube, user));
-            futures.add(future);
-        }
-
-        this.executor.shutdown();
+    this.executor.shutdown();
+  }
+
+  protected abstract Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, YouTube youtube, UserInfo userInfo);
+
+  @Override
+  public StreamsResultSet readCurrent() {
+    BlockingQueue<StreamsDatum> batch = new LinkedBlockingQueue<>();
+    int batchCount = 0;
+    while (!this.datumQueue.isEmpty() && batchCount < MAX_BATCH_SIZE) {
+      StreamsDatum datum = ComponentUtils.pollWhileNotEmpty(this.datumQueue);
+      if (datum != null) {
+        ++batchCount;
+        ComponentUtils.offerUntilSuccess(datum, batch);
+      }
     }
-
-    protected abstract Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, YouTube youtube, UserInfo userInfo);
-
-    @Override
-    public StreamsResultSet readCurrent() {
-        BlockingQueue<StreamsDatum> batch = new LinkedBlockingQueue<>();
-        int batchCount = 0;
-        while(!this.datumQueue.isEmpty() && batchCount < MAX_BATCH_SIZE) {
-            StreamsDatum datum = ComponentUtils.pollWhileNotEmpty(this.datumQueue);
-            if(datum != null) {
-                ++batchCount;
-                ComponentUtils.offerUntilSuccess(datum, batch);
-            }
-        }
-        return new StreamsResultSet(batch);
+    return new StreamsResultSet(batch);
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return null;
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return null;
+  }
+
+  @VisibleForTesting
+  protected YouTube createYouTubeClient() throws IOException, GeneralSecurityException {
+    GoogleCredential.Builder credentialBuilder = new GoogleCredential.Builder()
+        .setTransport(HTTP_TRANSPORT)
+        .setJsonFactory(JSON_FACTORY)
+        .setServiceAccountId(getConfig().getOauth().getServiceAccountEmailAddress())
+        .setServiceAccountScopes(scopes);
+
+    if ( !Strings.isNullOrEmpty(getConfig().getOauth().getPathToP12KeyFile())) {
+      File p12KeyFile = new File(getConfig().getOauth().getPathToP12KeyFile());
+      if ( p12KeyFile.exists() && p12KeyFile.isFile() && p12KeyFile.canRead()) {
+        credentialBuilder = credentialBuilder.setServiceAccountPrivateKeyFromP12File(p12KeyFile);
+      }
     }
-
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return null;
-    }
-
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return null;
+    Credential credential = credentialBuilder.build();
+    return new YouTube.Builder(HTTP_TRANSPORT, JSON_FACTORY, credential).setApplicationName("Streams Application").build();
+  }
+
+  @Override
+  public void cleanUp() {
+    ComponentUtils.shutdownExecutor(this.executor, 10, 10);
+    this.executor = null;
+  }
+
+  public YoutubeConfiguration getConfig() {
+    return config;
+  }
+
+  public void setConfig(YoutubeConfiguration config) {
+    this.config = config;
+  }
+
+  /**
+   * Set and overwrite the default before date that was read from the configuration file.
+   * @param defaultBeforeDate defaultBeforeDate
+   */
+  public void setDefaultBeforeDate(DateTime defaultBeforeDate) {
+    this.config.setDefaultBeforeDate(defaultBeforeDate);
+  }
+
+  /**
+   * Set and overwrite the default after date that was read from the configuration file.
+   * @param defaultAfterDate defaultAfterDate
+   */
+  public void setDefaultAfterDate(DateTime defaultAfterDate) {
+    this.config.setDefaultAfterDate(defaultAfterDate);
+  }
+
+  /**
+   * Sets and overwrites the user info from the configuration file.  Uses the default before and after dates.
+   * @param userIds Set of String userIds
+   */
+  public void setUserInfoWithDefaultDates(Set<String> userIds) {
+    List<UserInfo> youtubeUsers = new LinkedList<>();
+
+    for (String userId : userIds) {
+      UserInfo user = new UserInfo();
+      user.setUserId(userId);
+      user.setAfterDate(this.config.getDefaultAfterDate());
+      user.setBeforeDate(this.config.getDefaultBeforeDate());
+      youtubeUsers.add(user);
     }
 
-    @VisibleForTesting
-    protected YouTube createYouTubeClient() throws IOException, GeneralSecurityException {
-        GoogleCredential.Builder credentialBuilder = new GoogleCredential.Builder()
-                .setTransport(HTTP_TRANSPORT)
-                .setJsonFactory(JSON_FACTORY)
-                .setServiceAccountId(getConfig().getOauth().getServiceAccountEmailAddress())
-                .setServiceAccountScopes(scopes);
-
-        if( !Strings.isNullOrEmpty(getConfig().getOauth().getPathToP12KeyFile())) {
-            File p12KeyFile = new File(getConfig().getOauth().getPathToP12KeyFile());
-            if( p12KeyFile.exists() && p12KeyFile.isFile() && p12KeyFile.canRead()) {
-                credentialBuilder = credentialBuilder.setServiceAccountPrivateKeyFromP12File(p12KeyFile);
-            }
-        }
-        Credential credential = credentialBuilder.build();
-        return new YouTube.Builder(HTTP_TRANSPORT, JSON_FACTORY, credential).setApplicationName("Streams Application").build();
+    this.config.setYoutubeUsers(youtubeUsers);
+  }
+
+  /**
+   * Set and overwrite user info from the configuration file. Only sets the after date.
+   * @param usersAndAfterDates usersAndAfterDates
+   */
+  public void setUserInfoWithAfterDate(Map<String, DateTime> usersAndAfterDates) {
+    List<UserInfo> youtubeUsers = new LinkedList<>();
+
+    for (String userId : usersAndAfterDates.keySet()) {
+      UserInfo user = new UserInfo();
+      user.setUserId(userId);
+      user.setAfterDate(usersAndAfterDates.get(userId));
+      youtubeUsers.add(user);
     }
 
-    @Override
-    public void cleanUp() {
-        ComponentUtils.shutdownExecutor(this.executor, 10, 10);
-        this.executor = null;
-    }
-
-    public YoutubeConfiguration getConfig() {
-        return config;
-    }
-
-    public void setConfig(YoutubeConfiguration config) {
-        this.config = config;
-    }
-
-    /**
-     * Set and overwrite the default before date that was read from the configuration file.
-     * @param defaultBeforeDate
-     */
-    public void setDefaultBeforeDate(DateTime defaultBeforeDate) {
-        this.config.setDefaultBeforeDate(defaultBeforeDate);
-    }
-
-    /**
-     * Set and overwrite the default after date that was read from teh configuration file.
-     * @param defaultAfterDate
-     */
-    public void setDefaultAfterDate(DateTime defaultAfterDate) {
-        this.config.setDefaultAfterDate(defaultAfterDate);
-    }
-
-    /**
-     * Sets and overwrite the user info from the configuaration file.  Uses the defaults before and after dates.
-     * @param userIds
-     */
-    public void setUserInfoWithDefaultDates(Set<String> userIds) {
-        List<UserInfo> youtubeUsers = new LinkedList<>();
-
-        for(String userId : userIds) {
-            UserInfo user = new UserInfo();
-            user.setUserId(userId);
-            user.setAfterDate(this.config.getDefaultAfterDate());
-            user.setBeforeDate(this.config.getDefaultBeforeDate());
-            youtubeUsers.add(user);
-        }
-
-        this.config.setYoutubeUsers(youtubeUsers);
-    }
-
-    /**
-     * Set and overwrite user into from teh configuration file. Only sets after dater.
-     * @param usersAndAfterDates
-     */
-    public void setUserInfoWithAfterDate(Map<String, DateTime> usersAndAfterDates) {
-        List<UserInfo> youtubeUsers = new LinkedList<>();
-
-        for(String userId : usersAndAfterDates.keySet()) {
-            UserInfo user = new UserInfo();
-            user.setUserId(userId);
-            user.setAfterDate(usersAndAfterDates.get(userId));
-            youtubeUsers.add(user);
-        }
-
-        this.config.setYoutubeUsers(youtubeUsers);
-    }
+    this.config.setYoutubeUsers(youtubeUsers);
+  }
 
-    @Override
-    public boolean isRunning() {
-        if (datumQueue.isEmpty() && executor.isTerminated() && Futures.allAsList(futures).isDone()) {
-            LOGGER.info("Completed");
-            isComplete.set(true);
-            LOGGER.info("Exiting");
-        }
-        return !isComplete.get();
+  @Override
+  public boolean isRunning() {
+    if (datumQueue.isEmpty() && executor.isTerminated() && Futures.allAsList(futures).isDone()) {
+      LOGGER.info("Completed");
+      isComplete.set(true);
+      LOGGER.info("Exiting");
     }
+    return !isComplete.get();
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeUserActivityCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeUserActivityCollector.java b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeUserActivityCollector.java
index 76a69f3..9975dd9 100644
--- a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeUserActivityCollector.java
+++ b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeUserActivityCollector.java
@@ -19,6 +19,11 @@
 
 package com.youtube.provider;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.google.gplus.configuration.UserInfo;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.module.SimpleModule;
@@ -29,10 +34,6 @@ import com.google.api.services.youtube.model.ActivityListResponse;
 import com.google.api.services.youtube.model.Video;
 import com.google.api.services.youtube.model.VideoListResponse;
 import com.google.gson.Gson;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.google.gplus.configuration.UserInfo;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
 import org.apache.youtube.pojo.YoutubeConfiguration;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
@@ -42,169 +43,187 @@ import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.BlockingQueue;
 
+/**
+ * YoutubeDataCollector for YoutubeUserActivityProvider.
+ */
 public class YoutubeUserActivityCollector extends YoutubeDataCollector {
-    /**
-     * Max results allowed per request
-     * https://developers.google.com/+/api/latest/activities/list
-     */
-    private static final long MAX_RESULTS = 50;
-    private static final int MAX_ATTEMPTS = 5;
-    private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeUserActivityCollector.class);
-    private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
-
-    static { //set up mapper for Google Activity Object
-        SimpleModule simpleModule = new SimpleModule();
-        MAPPER.registerModule(simpleModule);
-        MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-    }
 
-    private BlockingQueue<StreamsDatum> datumQueue;
-    private BackOffStrategy backOff;
-    private YouTube youtube;
-    private UserInfo userInfo;
-    private YoutubeConfiguration config;
-
-    Gson gson = new Gson();
-
-    public YoutubeUserActivityCollector(YouTube youtube, BlockingQueue<StreamsDatum> datumQueue, BackOffStrategy backOff, UserInfo userInfo, YoutubeConfiguration config) {
-        this.youtube = youtube;
-        this.datumQueue = datumQueue;
-        this.backOff = backOff;
-        this.userInfo = userInfo;
-        this.config = config;
-    }
-
-    @Override
-    public void run() {
-        collectActivityData();
-    }
-
-    /**
-     * Iterate through all users in the Youtube configuration and collect all videos
-     * associated with their accounts.
-     */
-    protected void collectActivityData() {
+  /**
+   * Max results allowed per request
+   * https://developers.google.com/+/api/latest/activities/list
+   */
+  private static final long MAX_RESULTS = 50;
+  private static final int MAX_ATTEMPTS = 5;
+  private static final Logger LOGGER = LoggerFactory.getLogger(YoutubeUserActivityCollector.class);
+  private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  static { //set up mapper for Google Activity Object
+    SimpleModule simpleModule = new SimpleModule();
+    MAPPER.registerModule(simpleModule);
+    MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+  }
+
+  private BlockingQueue<StreamsDatum> datumQueue;
+  private BackOffStrategy backOff;
+  private YouTube youtube;
+  private UserInfo userInfo;
+  private YoutubeConfiguration config;
+
+  Gson gson = new Gson();
+
+  /**
+   * YoutubeUserActivityCollector constructor.
+   * @param youtube YouTube
+   * @param datumQueue BlockingQueue of StreamsDatum
+   * @param backOff BackOffStrategy
+   * @param userInfo UserInfo
+   * @param config YoutubeConfiguration
+   */
+  public YoutubeUserActivityCollector(
+      YouTube youtube,
+      BlockingQueue<StreamsDatum> datumQueue,
+      BackOffStrategy backOff,
+      UserInfo userInfo,
+      YoutubeConfiguration config) {
+    this.youtube = youtube;
+    this.datumQueue = datumQueue;
+    this.backOff = backOff;
+    this.userInfo = userInfo;
+    this.config = config;
+  }
+
+  @Override
+  public void run() {
+    collectActivityData();
+  }
+
+  /**
+   * Iterate through all users in the Youtube configuration and collect all videos
+   * associated with their accounts.
+   */
+  protected void collectActivityData() {
+    try {
+      YouTube.Activities.List request = null;
+      ActivityListResponse feed = null;
+
+      boolean tryAgain = false;
+      int attempt = 0;
+      DateTime afterDate = userInfo.getAfterDate();
+      DateTime beforeDate = userInfo.getBeforeDate();
+
+      do {
         try {
-            YouTube.Activities.List request = null;
-            ActivityListResponse feed = null;
-
-            boolean tryAgain = false;
-            int attempt = 0;
-            DateTime afterDate = userInfo.getAfterDate();
-            DateTime beforeDate = userInfo.getBeforeDate();
-
-            do {
-                try {
-                    if(request == null) {
-                        request = this.youtube.activities().list("contentDetails")
-                                                            .setChannelId(userInfo.getUserId())
-                                                            .setMaxResults(MAX_RESULTS)
-                                                            .setKey(config.getApiKey());
-                        feed = request.execute();
-                    } else {
-                        request = this.youtube.activities().list("contentDetails")
-                                                            .setChannelId(userInfo.getUserId())
-                                                            .setMaxResults(MAX_RESULTS)
-                                                            .setPageToken(feed.getNextPageToken())
-                                                            .setKey(config.getApiKey());
-                        feed = request.execute();
-                    }
-                    this.backOff.reset(); //successful pull reset api.
-
-                    processActivityFeed(feed, afterDate, beforeDate);
-                } catch (GoogleJsonResponseException gjre) {
-                    tryAgain = backoffAndIdentifyIfRetry(gjre, this.backOff);
-                    ++attempt;
-                }
-            } while((tryAgain || (feed != null && feed.getNextPageToken() != null)) && attempt < MAX_ATTEMPTS);
-        } catch (Throwable t) {
-            if(t instanceof InterruptedException) {
-                Thread.currentThread().interrupt();
-            }
-            t.printStackTrace();
-            LOGGER.warn("Unable to pull Activities for user={} : {}",this.userInfo.getUserId(), t);
+          if (request == null) {
+            request = this.youtube.activities().list("contentDetails")
+                .setChannelId(userInfo.getUserId())
+                .setMaxResults(MAX_RESULTS)
+                .setKey(config.getApiKey());
+            feed = request.execute();
+          } else {
+            request = this.youtube.activities().list("contentDetails")
+                .setChannelId(userInfo.getUserId())
+                .setMaxResults(MAX_RESULTS)
+                .setPageToken(feed.getNextPageToken())
+                .setKey(config.getApiKey());
+            feed = request.execute();
+          }
+          this.backOff.reset(); //successful pull reset api.
+
+          processActivityFeed(feed, afterDate, beforeDate);
+        } catch (GoogleJsonResponseException gjre) {
+          tryAgain = backoffAndIdentifyIfRetry(gjre, this.backOff);
+          ++attempt;
         }
+      }
+      while ((tryAgain || (feed != null && feed.getNextPageToken() != null)) && attempt < MAX_ATTEMPTS);
+    } catch (Throwable throwable) {
+      if (throwable instanceof InterruptedException) {
+        Thread.currentThread().interrupt();
+      }
+      throwable.printStackTrace();
+      LOGGER.warn("Unable to pull Activities for user={} : {}",this.userInfo.getUserId(), throwable);
     }
-
-    /**
-     * Given a feed and an after and before date, fetch all relevant user videos
-     * and place them into the datumQueue for post-processing
-     * @param feed
-     * @param afterDate
-     * @param beforeDate
-     * @throws IOException
-     * @throws InterruptedException
-     */
-    void processActivityFeed(ActivityListResponse feed, DateTime afterDate, DateTime beforeDate) throws IOException, InterruptedException {
-        for(com.google.api.services.youtube.model.Activity activity : feed.getItems()) {
-            try {
-                List<Video> videos = Lists.newArrayList();
-
-                if (activity.getContentDetails().getUpload() != null) {
-                    videos.addAll(getVideoList(activity.getContentDetails().getUpload().getVideoId()));
-                }
-                if (activity.getContentDetails().getPlaylistItem() != null && activity.getContentDetails().getPlaylistItem().getResourceId() != null) {
-                    videos.addAll(getVideoList(activity.getContentDetails().getPlaylistItem().getResourceId().getVideoId()));
-                }
-
-                processVideos(videos, afterDate, beforeDate, activity, feed);
-            } catch (Exception e) {
-                LOGGER.error("Error while trying to process activity: {}, {}", activity, e);
-            }
+  }
+
+  /**
+   * Given a feed and an after and before date, fetch all relevant user videos
+   * and place them into the datumQueue for post-processing.
+   * @param feed ActivityListResponse
+   * @param afterDate DateTime
+   * @param beforeDate DateTime
+   * @throws IOException IOException
+   * @throws InterruptedException InterruptedException
+   */
+  void processActivityFeed(ActivityListResponse feed, DateTime afterDate, DateTime beforeDate) throws IOException, InterruptedException {
+    for (com.google.api.services.youtube.model.Activity activity : feed.getItems()) {
+      try {
+        List<Video> videos = Lists.newArrayList();
+
+        if (activity.getContentDetails().getUpload() != null) {
+          videos.addAll(getVideoList(activity.getContentDetails().getUpload().getVideoId()));
         }
-    }
-
-    /**
-     * Process a list of Video objects
-     * @param videos
-     * @param afterDate
-     * @param beforeDate
-     * @param activity
-     * @param feed
-     */
-    void processVideos(List<Video> videos, DateTime afterDate, DateTime beforeDate, com.google.api.services.youtube.model.Activity activity, ActivityListResponse feed) {
-        try {
-            for (Video video : videos) {
-                if (video != null) {
-                    org.joda.time.DateTime published = new org.joda.time.DateTime(video.getSnippet().getPublishedAt().getValue());
-                    if ((afterDate == null && beforeDate == null)
-                            || (beforeDate == null && afterDate.isBefore(published))
-                            || (afterDate == null && beforeDate.isAfter(published))
-                            || ((afterDate != null && beforeDate != null) && (afterDate.isAfter(published) && beforeDate.isBefore(published)))) {
-                        LOGGER.debug("Providing Youtube Activity: {}", MAPPER.writeValueAsString(video));
-                        this.datumQueue.put(new StreamsDatum(gson.toJson(video), activity.getId()));
-                    } else if (afterDate != null && afterDate.isAfter(published)) {
-                        feed.setNextPageToken(null); // do not fetch next page
-                        break;
-                    }
-                }
-            }
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to process video list: {}, {}", videos, e);
+        if (activity.getContentDetails().getPlaylistItem() != null && activity.getContentDetails().getPlaylistItem().getResourceId() != null) {
+          videos.addAll(getVideoList(activity.getContentDetails().getPlaylistItem().getResourceId().getVideoId()));
         }
-    }
 
-    /**
-     * Given a Youtube videoId, return the relevant Youtube Video object
-     * @param videoId
-     * @return
-     * @throws IOException
-     */
-    List<Video> getVideoList(String videoId) throws IOException {
-        VideoListResponse videosListResponse = this.youtube.videos().list("snippet,statistics")
-                                                                    .setId(videoId)
-                                                                    .setKey(config.getApiKey())
-                                                                    .execute();
-
-        if(videosListResponse.getItems().size() == 0) {
-            LOGGER.debug("No Youtube videos found for videoId: {}", videoId);
-            return Lists.newArrayList();
+        processVideos(videos, afterDate, beforeDate, activity, feed);
+      } catch (Exception ex) {
+        LOGGER.error("Error while trying to process activity: {}, {}", activity, ex);
+      }
+    }
+  }
+
+  /**
+   * Process a list of Video objects.
+   * @param videos List of Video
+   * @param afterDate afterDate
+   * @param beforeDate beforeDate
+   * @param activity com.google.api.services.youtube.model.Activity
+   * @param feed ActivityListResponse
+   */
+  void processVideos(List<Video> videos, DateTime afterDate, DateTime beforeDate, com.google.api.services.youtube.model.Activity activity, ActivityListResponse feed) {
+    try {
+      for (Video video : videos) {
+        if (video != null) {
+          org.joda.time.DateTime published = new org.joda.time.DateTime(video.getSnippet().getPublishedAt().getValue());
+          if ((afterDate == null && beforeDate == null)
+              || (beforeDate == null && afterDate.isBefore(published))
+              || (afterDate == null && beforeDate.isAfter(published))
+              || ((afterDate != null && beforeDate != null) && (afterDate.isAfter(published) && beforeDate.isBefore(published)))) {
+            LOGGER.debug("Providing Youtube Activity: {}", MAPPER.writeValueAsString(video));
+            this.datumQueue.put(new StreamsDatum(gson.toJson(video), activity.getId()));
+          } else if (afterDate != null && afterDate.isAfter(published)) {
+            feed.setNextPageToken(null); // do not fetch next page
+            break;
+          }
         }
-
-        return videosListResponse.getItems();
+      }
+    } catch (Exception ex) {
+      LOGGER.error("Exception while trying to process video list: {}, {}", videos, ex);
     }
-
-    BlockingQueue<StreamsDatum> getDatumQueue() {
-        return this.datumQueue;
+  }
+
+  /**
+   * Given a Youtube videoId, return the relevant Youtube Video object.
+   * @param videoId videoId
+   * @return List of Video
+   * @throws IOException IOException
+   */
+  List<Video> getVideoList(String videoId) throws IOException {
+    VideoListResponse videosListResponse = this.youtube.videos().list("snippet,statistics")
+        .setId(videoId)
+        .setKey(config.getApiKey())
+        .execute();
+
+    if (videosListResponse.getItems().size() == 0) {
+      LOGGER.debug("No Youtube videos found for videoId: {}", videoId);
+      return Lists.newArrayList();
     }
+
+    return videosListResponse.getItems();
+  }
+
+  BlockingQueue<StreamsDatum> getDatumQueue() {
+    return this.datumQueue;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeUserActivityProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeUserActivityProvider.java b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeUserActivityProvider.java
index ed3dc63..934a0e5 100644
--- a/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeUserActivityProvider.java
+++ b/streams-contrib/streams-provider-youtube/src/main/java/com/youtube/provider/YoutubeUserActivityProvider.java
@@ -19,6 +19,14 @@
 
 package com.youtube.provider;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfiguration;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.google.gplus.configuration.UserInfo;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.api.services.youtube.YouTube;
@@ -27,13 +35,6 @@ import com.google.common.util.concurrent.Uninterruptibles;
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
 import com.typesafe.config.ConfigParseOptions;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.google.gplus.configuration.UserInfo;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
 import org.apache.youtube.pojo.YoutubeConfiguration;
 
 import java.io.BufferedOutputStream;
@@ -46,76 +47,86 @@ import java.util.concurrent.TimeUnit;
 
 /**
  *  Retrieve recent activity from a list of user ids or names.
- *
- *  To use from command line:
- *
- *  Supply (at least) the following required configuration in application.conf:
- *
- *  youtube.oauth.pathToP12KeyFile
- *  youtube.oauth.serviceAccountEmailAddress
- *  youtube.apiKey
- *  youtube.youtubeUsers
- *
- *  Launch using:
- *
- *  mvn exec:java -Dexec.mainClass=org.apache.streams.youtube.provider.YoutubeUserActivityProvider -Dexec.args="application.conf tweets.json"
  */
 public class YoutubeUserActivityProvider extends YoutubeProvider {
 
-    public YoutubeUserActivityProvider() {
-        super();
-    }
+  public YoutubeUserActivityProvider() {
+    super();
+  }
 
-    public YoutubeUserActivityProvider(YoutubeConfiguration config) {
-        super(config);
-    }
+  public YoutubeUserActivityProvider(YoutubeConfiguration config) {
+    super(config);
+  }
 
-    @Override
-    protected Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, YouTube youtube, UserInfo userInfo) {
-        return new YoutubeUserActivityCollector(youtube, queue, strategy, userInfo, config);
-    }
+  @Override
+  protected Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, YouTube youtube, UserInfo userInfo) {
+    return new YoutubeUserActivityCollector(youtube, queue, strategy, userInfo, config);
+  }
+
+  /**
+   * To use from command line:
+   *
+   * <p/>
+   * Supply (at least) the following required configuration in application.conf:
+   *
+   * <p/>
+   * youtube.oauth.pathToP12KeyFile
+   * youtube.oauth.serviceAccountEmailAddress
+   * youtube.apiKey
+   * youtube.youtubeUsers
+   *
+   * <p/>
+   * Launch using:
+   *
+   * <p/>
+   * mvn exec:java -Dexec.mainClass=org.apache.streams.youtube.provider.YoutubeUserActivityProvider -Dexec.args="application.conf tweets.json"
+   *
+   * @param args args
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    YoutubeConfiguration config = new ComponentConfigurator<>(YoutubeConfiguration.class).detectConfiguration(typesafe, "youtube");
+    YoutubeUserActivityProvider provider = new YoutubeUserActivityProvider(config);
+
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    public static void main(String[] args) throws Exception {
-
-        Preconditions.checkArgument(args.length >= 2);
-
-        String configfile = args[0];
-        String outfile = args[1];
-
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config testResourceConfig = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
-
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        YoutubeConfiguration config = new ComponentConfigurator<>(YoutubeConfiguration.class).detectConfiguration(typesafe, "youtube");
-        YoutubeUserActivityProvider provider = new YoutubeUserActivityProvider(config);
-
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
-            while(iterator.hasNext()) {
-                StreamsDatum datum = iterator.next();
-                String json;
-                try {
-                    if( datum.getDocument() instanceof String )
-                        json = (String)datum.getDocument();
-                    else
-                        json = mapper.writeValueAsString(datum.getDocument());
-                    outStream.println(json);
-                } catch (JsonProcessingException e) {
-                    System.err.println(e.getMessage());
-                }
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
+      while (iterator.hasNext()) {
+        StreamsDatum datum = iterator.next();
+        String json;
+        try {
+          if ( datum.getDocument() instanceof String ) {
+            json = (String) datum.getDocument();
+          } else {
+            json = mapper.writeValueAsString(datum.getDocument());
+          }
+          outStream.println(json);
+        } catch (JsonProcessingException ex) {
+          System.err.println(ex.getMessage());
+        }
+      }
     }
+    while ( provider.isRunning());
+    provider.cleanUp();
+    outStream.flush();
+  }
 }



[26/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/recentmedia/InstagramRecentMediaProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/recentmedia/InstagramRecentMediaProvider.java b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/recentmedia/InstagramRecentMediaProvider.java
index 9a31b5a..c68ef95 100644
--- a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/recentmedia/InstagramRecentMediaProvider.java
+++ b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/recentmedia/InstagramRecentMediaProvider.java
@@ -12,15 +12,9 @@ software distributed under the License is distributed on an
 KIND, either express or implied. See the License for the
 specific language governing permissions and limitations
 under the License. */
+
 package org.apache.streams.instagram.provider.recentmedia;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.Uninterruptibles;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigParseOptions;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.config.StreamsConfigurator;
@@ -29,6 +23,14 @@ import org.apache.streams.instagram.InstagramConfiguration;
 import org.apache.streams.instagram.provider.InstagramAbstractProvider;
 import org.apache.streams.instagram.provider.InstagramDataCollector;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.Uninterruptibles;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -40,78 +42,91 @@ import java.util.Iterator;
 import java.util.concurrent.TimeUnit;
 
 /**
- * Instagram {@link org.apache.streams.core.StreamsProvider} that provides the recent media data for a group of users
+ * Instagram {@link org.apache.streams.core.StreamsProvider} that provides the recent media data for a group of users.
  *
+ * <p/>
  * Retrieve recent posts from a list of user ids or names.
  *
- *  To use from command line:
- *
- *  Supply (at least) the following required configuration in application.conf:
- *
- *  instagram.clientKey
- *  instagram.usersInfo.authorizedTokens
- *  instagram.usersInfo.users
- *
- *  Launch using:
- *
- *  mvn exec:java -Dexec.mainClass=org.apache.streams.instagram.provider.recentmedia.InstagramRecentMediaProvider -Dexec.args="application.conf media.json"
  */
 public class InstagramRecentMediaProvider extends InstagramAbstractProvider {
 
-    public static final String STREAMS_ID = "InstagramRecentMediaProvider";
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(InstagramRecentMediaProvider.class);
-
-    private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
-
-    public InstagramRecentMediaProvider() {
-    }
-
-    public InstagramRecentMediaProvider(InstagramConfiguration config) {
-        super(config);
-    }
-
-    @Override
-    protected InstagramDataCollector getInstagramDataCollector() {
-        return new InstagramRecentMediaCollector(super.dataQueue, super.config);
-    }
-
-    public static void main(String[] args) throws Exception {
-
-        Preconditions.checkArgument(args.length >= 2);
-
-        String configfile = args[0];
-        String outfile = args[1];
-
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config conf = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-
-        Config typesafe  = conf.withFallback(reference).resolve();
-
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        InstagramConfiguration config = new ComponentConfigurator<>(InstagramConfiguration.class).detectConfiguration(typesafe, "instagram");
-        InstagramRecentMediaProvider provider = new InstagramRecentMediaProvider(config);
-
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
-            while(iterator.hasNext()) {
-                StreamsDatum datum = iterator.next();
-                String json;
-                try {
-                    json = MAPPER.writeValueAsString(datum.getDocument());
-                    outStream.println(json);
-                } catch (JsonProcessingException e) {
-                    System.err.println(e.getMessage());
-                }
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
+  public static final String STREAMS_ID = "InstagramRecentMediaProvider";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramRecentMediaProvider.class);
+
+  private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  public InstagramRecentMediaProvider() {
+  }
+
+  public InstagramRecentMediaProvider(InstagramConfiguration config) {
+    super(config);
+  }
+
+  @Override
+  protected InstagramDataCollector getInstagramDataCollector() {
+    return new InstagramRecentMediaCollector(super.dataQueue, super.config);
+  }
+
+  /**
+   * To use from command line:
+   *
+   * <p/>
+   * Supply (at least) the following required configuration in application.conf:
+   *
+   * <p/>
+   * instagram.clientKey
+   * instagram.usersInfo.authorizedTokens
+   * instagram.usersInfo.users
+   *
+   * <p/>
+   * Launch using:
+   *
+   * <p/>
+   * mvn exec:java \
+   * -Dexec.mainClass=org.apache.streams.instagram.provider.recentmedia.InstagramRecentMediaProvider \
+   * -Dexec.args="application.conf media.json"
+   *
+   * @param args args
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config conf = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = conf.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    InstagramConfiguration config = new ComponentConfigurator<>(InstagramConfiguration.class).detectConfiguration(typesafe, "instagram");
+    InstagramRecentMediaProvider provider = new InstagramRecentMediaProvider(config);
+
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
+      while (iterator.hasNext()) {
+        StreamsDatum datum = iterator.next();
+        String json;
+        try {
+          json = MAPPER.writeValueAsString(datum.getDocument());
+          outStream.println(json);
+        } catch (JsonProcessingException ex) {
+          System.err.println(ex.getMessage());
+        }
+      }
     }
+    while ( provider.isRunning());
+    provider.cleanUp();
+    outStream.flush();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/userinfo/InstagramUserInfoCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/userinfo/InstagramUserInfoCollector.java b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/userinfo/InstagramUserInfoCollector.java
index 0985ae8..98d0f3c 100644
--- a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/userinfo/InstagramUserInfoCollector.java
+++ b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/userinfo/InstagramUserInfoCollector.java
@@ -12,13 +12,15 @@ software distributed under the License is distributed on an
 KIND, either express or implied. See the License for the
 specific language governing permissions and limitations
 under the License. */
+
 package org.apache.streams.instagram.provider.userinfo;
 
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.instagram.InstagramConfiguration;
 import org.apache.streams.instagram.User;
 import org.apache.streams.instagram.provider.InstagramDataCollector;
+
+import com.google.common.collect.Lists;
 import org.jinstagram.entity.users.basicinfo.UserInfo;
 import org.jinstagram.entity.users.basicinfo.UserInfoData;
 import org.jinstagram.exceptions.InstagramBadRequestException;
@@ -33,60 +35,60 @@ import java.util.Queue;
  * InstagramDataCollector that pulls UserInfoData from Instagram
  * @see org.apache.streams.instagram.provider.InstagramDataCollector
  */
-public class InstagramUserInfoCollector extends InstagramDataCollector<UserInfoData>{
+public class InstagramUserInfoCollector extends InstagramDataCollector<UserInfoData> {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(InstagramUserInfoCollector.class);
-    protected static final int MAX_ATTEMPTS = 5;
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramUserInfoCollector.class);
+  protected static final int MAX_ATTEMPTS = 5;
 
-    private int consecutiveErrorCount;
+  private int consecutiveErrorCount;
 
-    public InstagramUserInfoCollector(Queue<StreamsDatum> dataQueue, InstagramConfiguration config) {
-        super(dataQueue, config);
-        this.consecutiveErrorCount = 0;
-    }
+  public InstagramUserInfoCollector(Queue<StreamsDatum> dataQueue, InstagramConfiguration config) {
+    super(dataQueue, config);
+    this.consecutiveErrorCount = 0;
+  }
 
-    @Override
-    protected void collectInstagramDataForUser(User user) throws Exception {
-        int attempt = 0;
-        boolean successful = false;
-        UserInfo userInfo = null;
-        while(!successful && attempt < MAX_ATTEMPTS) {
-            ++attempt;
-            try {
-                userInfo = getNextInstagramClient().getUserInfo(user.getUserId());
-            } catch (Exception e) {
-                if(e instanceof InstagramRateLimitException) {
-                    LOGGER.warn("Hit rate limit exception, backing off.");
-                    super.backOffStrategy.backOff();
-                } else if(e instanceof InstagramBadRequestException) {
-                    LOGGER.error("Sent a bad request to Instagram, skipping user : {}", user.getUserId());
-                    attempt = MAX_ATTEMPTS;
-                    ++this.consecutiveErrorCount;
-                } else {
-                    LOGGER.error("Expection while polling instagram : {}", e);
-                    ++this.consecutiveErrorCount;
-                }
-                if(this.consecutiveErrorCount >= Math.max(super.numAvailableTokens(), MAX_ATTEMPTS * 2)) {
-                    LOGGER.error("Consecutive Errors above acceptable limits, ending collection of data.");
-                    throw new Exception("Consecutive Errors above acceptable limits : "+this.consecutiveErrorCount);
-                }
-            }
-            if(successful = (userInfo != null)) {
-                this.consecutiveErrorCount = 0;
-                List<UserInfoData> data = Lists.newLinkedList();
-                data.add(userInfo.getData());
-                super.queueData(data, user.getUserId());
-            }
+  @Override
+  protected void collectInstagramDataForUser(User user) throws Exception {
+    int attempt = 0;
+    boolean successful = false;
+    UserInfo userInfo = null;
+    while (!successful && attempt < MAX_ATTEMPTS) {
+      ++attempt;
+      try {
+        userInfo = getNextInstagramClient().getUserInfo(user.getUserId());
+      } catch (Exception ex) {
+        if (ex instanceof InstagramRateLimitException) {
+          LOGGER.warn("Hit rate limit exception, backing off.");
+          super.backOffStrategy.backOff();
+        } else if (ex instanceof InstagramBadRequestException) {
+          LOGGER.error("Sent a bad request to Instagram, skipping user : {}", user.getUserId());
+          attempt = MAX_ATTEMPTS;
+          ++this.consecutiveErrorCount;
+        } else {
+          LOGGER.error("Expection while polling instagram : {}", ex);
+          ++this.consecutiveErrorCount;
         }
-        if(attempt == MAX_ATTEMPTS) {
-            LOGGER.error("Failed to collect data for user : {}", user.getUserId());
+        if (this.consecutiveErrorCount >= Math.max(super.numAvailableTokens(), MAX_ATTEMPTS * 2)) {
+          LOGGER.error("Consecutive Errors above acceptable limits, ending collection of data.");
+          throw new Exception("Consecutive Errors above acceptable limits : " + this.consecutiveErrorCount);
         }
+      }
+      if (successful = (userInfo != null)) {
+        this.consecutiveErrorCount = 0;
+        List<UserInfoData> data = Lists.newLinkedList();
+        data.add(userInfo.getData());
+        super.queueData(data, user.getUserId());
+      }
     }
-
-    @Override
-    protected StreamsDatum convertToStreamsDatum(UserInfoData item) {
-        return new StreamsDatum(item, item.getId());
+    if (attempt == MAX_ATTEMPTS) {
+      LOGGER.error("Failed to collect data for user : {}", user.getUserId());
     }
+  }
+
+  @Override
+  protected StreamsDatum convertToStreamsDatum(UserInfoData item) {
+    return new StreamsDatum(item, item.getId());
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/userinfo/InstagramUserInfoProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/userinfo/InstagramUserInfoProvider.java b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/userinfo/InstagramUserInfoProvider.java
index 0a47944..4469cd6 100644
--- a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/userinfo/InstagramUserInfoProvider.java
+++ b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/provider/userinfo/InstagramUserInfoProvider.java
@@ -12,16 +12,9 @@ software distributed under the License is distributed on an
 KIND, either express or implied. See the License for the
 specific language governing permissions and limitations
 under the License. */
+
 package org.apache.streams.instagram.provider.userinfo;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.Uninterruptibles;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigParseOptions;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.config.StreamsConfigurator;
@@ -31,6 +24,14 @@ import org.apache.streams.instagram.InstagramUserInformationConfiguration;
 import org.apache.streams.instagram.provider.InstagramAbstractProvider;
 import org.apache.streams.instagram.provider.InstagramDataCollector;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.Uninterruptibles;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -42,80 +43,92 @@ import java.util.Iterator;
 import java.util.concurrent.TimeUnit;
 
 /**
- * Instagram provider that pulls UserInfoData from Instagram
- * @see org.apache.streams.instagram.provider.InstagramAbstractProvider
+ * Instagram provider that pulls UserInfoData from Instagram.
  *
+ * <p/>
  * Retrieve latest user details from a list of user ids or names.
- *
- *  To use from command line:
- *
- *  Supply (at least) the following required configuration in application.conf:
- *
- *  instagram.clientKey
- *  instagram.usersInfo.authorizedTokens
- *  instagram.usersInfo.users
- *
- *  Launch using:
- *
- *  mvn exec:java -Dexec.mainClass=org.apache.streams.instagram.provider.userinfo.InstagramUserInfoProvider -Dexec.args="application.conf userinfo.json"
  */
 public class InstagramUserInfoProvider extends InstagramAbstractProvider {
 
-    public static final String STREAMS_ID = "InstagramUserInfoProvider";
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(InstagramUserInfoProvider.class);
-
-    private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
-
-    public InstagramUserInfoProvider() {
-        super();
-    }
-
-    public InstagramUserInfoProvider(InstagramConfiguration config) {
-        super(config);
-    }
-
-    @Override
-    protected InstagramDataCollector getInstagramDataCollector() {
-        return new InstagramUserInfoCollector(super.dataQueue, super.config);
-    }
-
-    public static void main(String[] args) throws Exception {
-
-        Preconditions.checkArgument(args.length >= 2);
-
-        String configfile = args[0];
-        String outfile = args[1];
-
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config conf = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-
-        Config typesafe  = conf.withFallback(reference).resolve();
-
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        InstagramUserInformationConfiguration config = new ComponentConfigurator<>(InstagramUserInformationConfiguration.class).detectConfiguration(typesafe, "instagram");
-        InstagramUserInfoProvider provider = new InstagramUserInfoProvider(config);
-
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
-            while(iterator.hasNext()) {
-                StreamsDatum datum = iterator.next();
-                String json;
-                try {
-                    json = MAPPER.writeValueAsString(datum.getDocument());
-                    outStream.println(json);
-                } catch (JsonProcessingException e) {
-                    System.err.println(e.getMessage());
-                }
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
+  public static final String STREAMS_ID = "InstagramUserInfoProvider";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramUserInfoProvider.class);
+
+  private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  public InstagramUserInfoProvider() {
+    super();
+  }
+
+  public InstagramUserInfoProvider(InstagramConfiguration config) {
+    super(config);
+  }
+
+  @Override
+  protected InstagramDataCollector getInstagramDataCollector() {
+    return new InstagramUserInfoCollector(super.dataQueue, super.config);
+  }
+
+  /**
+   * To use from command line:
+   *
+   * <p/>
+   * Supply (at least) the following required configuration in application.conf:
+   *
+   * <p/>
+   * instagram.clientKey
+   * instagram.usersInfo.authorizedTokens
+   * instagram.usersInfo.users
+   *
+   * <p/>
+   * Launch using:
+   *
+   * <p/>
+   * mvn exec:java \
+   * -Dexec.mainClass=org.apache.streams.instagram.provider.userinfo.InstagramUserInfoProvider \
+   * -Dexec.args="application.conf userinfo.json"
+   *
+   * @param args args
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config conf = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = conf.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    InstagramUserInformationConfiguration config = new ComponentConfigurator<>(InstagramUserInformationConfiguration.class)
+        .detectConfiguration(typesafe, "instagram");
+    InstagramUserInfoProvider provider = new InstagramUserInfoProvider(config);
+
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
+      while (iterator.hasNext()) {
+        StreamsDatum datum = iterator.next();
+        String json;
+        try {
+          json = MAPPER.writeValueAsString(datum.getDocument());
+          outStream.println(json);
+        } catch (JsonProcessingException ex) {
+          System.err.println(ex.getMessage());
+        }
+      }
     }
+    while ( provider.isRunning());
+    provider.cleanUp();
+    outStream.flush();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/InstagramMediaFeedDataConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/InstagramMediaFeedDataConverter.java b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/InstagramMediaFeedDataConverter.java
index 8795e2c..c9bbd85 100644
--- a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/InstagramMediaFeedDataConverter.java
+++ b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/InstagramMediaFeedDataConverter.java
@@ -18,11 +18,12 @@
 
 package org.apache.streams.instagram.serializer;
 
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.pojo.json.Activity;
+
+import com.google.common.collect.Lists;
+import org.apache.commons.lang.NotImplementedException;
 import org.jinstagram.entity.users.feed.MediaFeedData;
 
 import java.io.Serializable;
@@ -30,48 +31,47 @@ import java.util.List;
 
 import static org.apache.streams.instagram.serializer.util.InstagramActivityUtil.updateActivity;
 
-public class InstagramMediaFeedDataConverter implements ActivityConverter<MediaFeedData>, Serializable
-{
+public class InstagramMediaFeedDataConverter implements ActivityConverter<MediaFeedData>, Serializable {
 
-    public static Class requiredClass = MediaFeedData.class;
+  public static Class requiredClass = MediaFeedData.class;
 
-    public InstagramMediaFeedDataConverter() {
+  public InstagramMediaFeedDataConverter() {
 
-    }
+  }
 
-    @Override
-    public Class requiredClass() {
-        return requiredClass;
-    }
+  @Override
+  public Class requiredClass() {
+    return requiredClass;
+  }
 
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
 
-    @Override
-    public MediaFeedData fromActivity(Activity deserialized) throws ActivityConversionException {
-        throw new NotImplementedException();
-    }
+  @Override
+  public MediaFeedData fromActivity(Activity deserialized) throws ActivityConversionException {
+    throw new NotImplementedException();
+  }
 
-    @Override
-    public List<Activity> toActivityList(MediaFeedData item) throws ActivityConversionException {
+  @Override
+  public List<MediaFeedData> fromActivityList(List<Activity> list) throws ActivityConversionException {
+    throw new NotImplementedException();
+  }
 
-        Activity activity = new Activity();
+  @Override
+  public List<Activity> toActivityList(MediaFeedData item) throws ActivityConversionException {
 
-        updateActivity(item, activity);
+    Activity activity = new Activity();
 
-        return Lists.newArrayList(activity);
-    }
+    updateActivity(item, activity);
 
-    @Override
-    public List<MediaFeedData> fromActivityList(List<Activity> list) throws ActivityConversionException {
-        throw new NotImplementedException();
-    }
+    return Lists.newArrayList(activity);
+  }
 
-    @Override
-    public List<Activity> toActivityList(List<MediaFeedData> list) throws ActivityConversionException {
-        throw new NotImplementedException();
-    }
+  @Override
+  public List<Activity> toActivityList(List<MediaFeedData> list) throws ActivityConversionException {
+    throw new NotImplementedException();
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/InstagramUserInfoDataConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/InstagramUserInfoDataConverter.java b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/InstagramUserInfoDataConverter.java
index 4fa680c..1109163 100644
--- a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/InstagramUserInfoDataConverter.java
+++ b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/InstagramUserInfoDataConverter.java
@@ -23,6 +23,7 @@ import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Image;
 import org.apache.streams.pojo.json.Provider;
+
 import org.jinstagram.entity.users.basicinfo.UserInfoData;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -33,56 +34,56 @@ import java.util.HashMap;
 import java.util.Map;
 
 /**
- *
+ * InstagramUserInfoDataConverter
  */
 public class InstagramUserInfoDataConverter implements ActivityObjectConverter<UserInfoData> {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(InstagramUserInfoDataConverter.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramUserInfoDataConverter.class);
 
-    private static final String STREAMS_ID_PREFIX = "id:instagram:";
-    private static final String PROVIDER_ID = "id:provider:instagram";
-    private static final String DISPLAY_NAME = "Instagram";
+  private static final String STREAMS_ID_PREFIX = "id:instagram:";
+  private static final String PROVIDER_ID = "id:provider:instagram";
+  private static final String DISPLAY_NAME = "Instagram";
 
-    @Override
-    public Class requiredClass() {
-        return UserInfoData.class;
-    }
+  @Override
+  public Class requiredClass() {
+    return UserInfoData.class;
+  }
 
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
 
-    @Override
-    public UserInfoData fromActivityObject(ActivityObject deserialized) throws ActivityConversionException {
-        return null;
-    }
+  @Override
+  public UserInfoData fromActivityObject(ActivityObject deserialized) throws ActivityConversionException {
+    return null;
+  }
 
-    @Override
-    public ActivityObject toActivityObject(UserInfoData serialized) throws ActivityConversionException {
-        ActivityObject activityObject = new ActivityObject();
-        activityObject.setObjectType("page");
-        Provider provider = new Provider();
-        provider.setId(PROVIDER_ID);
-        provider.setDisplayName(DISPLAY_NAME);
-        activityObject.getAdditionalProperties().put("provider", provider);
-        activityObject.setPublished(DateTime.now().withZone(DateTimeZone.UTC));
-        Image image = new Image();
-        image.setUrl(serialized.getProfilePicture());
-        activityObject.setImage(image);
-        activityObject.setId(STREAMS_ID_PREFIX+serialized.getId());
-        activityObject.setSummary(serialized.getBio());
-        activityObject.setAdditionalProperty("handle", serialized.getUsername());
-        activityObject.setDisplayName(serialized.getFullName());
-        activityObject.setUrl(serialized.getWebsite());
-        Map<String, Object> extensions = new HashMap<>();
-        activityObject.setAdditionalProperty("extensions", extensions);
-        extensions.put("screenName", serialized.getUsername());
-        extensions.put("posts", serialized.getCounts().getMedia());
-        extensions.put("followers", serialized.getCounts().getFollowedBy());
-        extensions.put("website", serialized.getWebsite());
-        extensions.put("following", serialized.getCounts().getFollows());
-        return activityObject;
-    }
+  @Override
+  public ActivityObject toActivityObject(UserInfoData serialized) throws ActivityConversionException {
+    ActivityObject activityObject = new ActivityObject();
+    activityObject.setObjectType("page");
+    Provider provider = new Provider();
+    provider.setId(PROVIDER_ID);
+    provider.setDisplayName(DISPLAY_NAME);
+    activityObject.getAdditionalProperties().put("provider", provider);
+    activityObject.setPublished(DateTime.now().withZone(DateTimeZone.UTC));
+    Image image = new Image();
+    image.setUrl(serialized.getProfilePicture());
+    activityObject.setImage(image);
+    activityObject.setId(STREAMS_ID_PREFIX + serialized.getId());
+    activityObject.setSummary(serialized.getBio());
+    activityObject.setAdditionalProperty("handle", serialized.getUsername());
+    activityObject.setDisplayName(serialized.getFullName());
+    activityObject.setUrl(serialized.getWebsite());
+    Map<String, Object> extensions = new HashMap<>();
+    activityObject.setAdditionalProperty("extensions", extensions);
+    extensions.put("screenName", serialized.getUsername());
+    extensions.put("posts", serialized.getCounts().getMedia());
+    extensions.put("followers", serialized.getCounts().getFollowedBy());
+    extensions.put("website", serialized.getWebsite());
+    extensions.put("following", serialized.getCounts().getFollows());
+    return activityObject;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/util/InstagramActivityUtil.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/util/InstagramActivityUtil.java b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/util/InstagramActivityUtil.java
index 1c82da4..dd181de 100644
--- a/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/util/InstagramActivityUtil.java
+++ b/streams-contrib/streams-provider-instagram/src/main/java/org/apache/streams/instagram/serializer/util/InstagramActivityUtil.java
@@ -19,9 +19,6 @@
 
 package org.apache.streams.instagram.serializer.util;
 
-import com.google.common.base.Joiner;
-import com.google.common.base.Optional;
-import com.google.common.collect.Lists;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.exceptions.ActivitySerializerException;
 import org.apache.streams.pojo.extensions.ExtensionUtil;
@@ -29,6 +26,10 @@ import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 import org.apache.streams.pojo.json.Image;
 import org.apache.streams.pojo.json.Provider;
+
+import com.google.common.base.Joiner;
+import com.google.common.base.Optional;
+import com.google.common.collect.Lists;
 import org.jinstagram.entity.comments.CommentData;
 import org.jinstagram.entity.common.Comments;
 import org.jinstagram.entity.common.ImageData;
@@ -48,301 +49,305 @@ import java.util.List;
 import java.util.Map;
 
 /**
- * Provides utilities for working with Activity objects within the context of Instagram
+ * Provides utilities for working with Activity objects within the context of Instagram.
  */
 public class InstagramActivityUtil {
-    private static final Logger LOGGER = LoggerFactory.getLogger(InstagramActivityUtil.class);
-    /**
-     * Updates the given Activity object with the values from the item
-     * @param item the object to use as the source
-     * @param activity the target of the updates.  Will receive all values from the tweet.
-     * @throws ActivityConversionException
-     */
-    public static void updateActivity(MediaFeedData item, Activity activity) throws ActivityConversionException {
-        activity.setActor(buildActor(item));
-        activity.setVerb("post");
-
-        if(item.getCreatedTime() != null)
-            activity.setPublished(new DateTime(Long.parseLong(item.getCreatedTime()) * 1000));
-
-        activity.setId(formatId(activity.getVerb(),
-                Optional.fromNullable(
-                        item.getId())
-                        .orNull()));
-
-        activity.setProvider(getProvider());
-        activity.setUrl(item.getLink());
-        activity.setObject(buildActivityObject(item));
-
-        if(item.getCaption() != null)
-            activity.setContent(item.getCaption().getText());
-
-        addInstagramExtensions(activity, item);
-    }
-
-    /**
-     * Updates the given Activity object with the values from the item
-     * @param item the object to use as the source
-     * @param activity the target of the updates.  Will receive all values from the tweet.
-     * @throws ActivitySerializerException
-     */
-    public static void updateActivity(UserInfoData item, Activity activity) throws ActivitySerializerException {
-        activity.setActor(buildActor(item));
-        activity.setId(null);
-        activity.setProvider(getProvider());
-    }
-
-    /**
-     * Builds an Actor object given a UserInfoData object
-     * @param item
-     * @return Actor object
-     */
-    public static ActivityObject buildActor(UserInfoData item) {
-        ActivityObject actor = new ActivityObject();
 
-        try {
-            Image image = new Image();
-            image.setUrl(item.getProfilePicture());
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramActivityUtil.class);
+  /**
+   * Updates the given Activity object with the values from the item.
+   * @param item the object to use as the source
+   * @param activity the target of the updates.  Will receive all values from the item.
+   * @throws ActivityConversionException ActivityConversionException
+   */
 
-            Counts counts = item.getCounts();
+  public static void updateActivity(MediaFeedData item, Activity activity) throws ActivityConversionException {
+    activity.setActor(buildActor(item));
+    activity.setVerb("post");
 
-            Map<String, Object> extensions = new HashMap<>();
-
-            extensions.put("followers", counts.getFollowedBy());
-            extensions.put("follows", counts.getFollows());
-            extensions.put("screenName", item.getUsername());
-            extensions.put("posts", counts.getMedia());
+    if (item.getCreatedTime() != null) {
+      activity.setPublished(new DateTime(Long.parseLong(item.getCreatedTime()) * 1000));
+    }
 
-            actor.setId(formatId(String.valueOf(item.getId())));
-            actor.setImage(image);
-            actor.setDisplayName(item.getFullName());
-            actor.setSummary(item.getBio());
-            actor.setUrl(item.getWebsite());
+    activity.setId(formatId(activity.getVerb(),
+        Optional.fromNullable(
+            item.getId())
+            .orNull()));
 
-            actor.setAdditionalProperty("handle", item.getUsername());
-            actor.setAdditionalProperty("extensions", extensions);
-        } catch (Exception e) {
-            LOGGER.error("Exception trying to build actor object: {}", e.getMessage());
-        }
+    activity.setProvider(getProvider());
+    activity.setUrl(item.getLink());
+    activity.setObject(buildActivityObject(item));
 
-        return actor;
+    if (item.getCaption() != null) {
+      activity.setContent(item.getCaption().getText());
     }
 
-    /**
-     * Builds the actor
-     * @param item the item
-     * @return a valid Actor
-     */
-    public static ActivityObject buildActor(MediaFeedData item) {
-        ActivityObject actor = new ActivityObject();
-
-        try {
-            Image image = new Image();
-            image.setUrl(item.getUser().getProfilePictureUrl());
-
-            Map<String, Object> extensions = new HashMap<>();
-            extensions.put("screenName", item.getUser().getUserName());
-
-            actor.setDisplayName(item.getUser().getFullName());
-            actor.setSummary(item.getUser().getBio());
-            actor.setUrl(item.getUser().getWebsiteUrl());
-
-            actor.setId(formatId(String.valueOf(item.getUser().getId())));
-            actor.setImage(image);
-            actor.setAdditionalProperty("extensions", extensions);
-            actor.setAdditionalProperty("handle", item.getUser().getUserName());
-        } catch (Exception e) {
-            LOGGER.error("Exception trying to build actor object: {}", e.getMessage());
-        }
-
-        return actor;
+    addInstagramExtensions(activity, item);
+  }
+
+  /**
+   * Updates the given Activity object with the values from the item.
+   * @param item the object to use as the source
+   * @param activity the target of the updates.  Will receive all values from the item.
+   * @throws ActivitySerializerException ActivitySerializerException
+   */
+  public static void updateActivity(UserInfoData item, Activity activity) throws ActivitySerializerException {
+    activity.setActor(buildActor(item));
+    activity.setId(null);
+    activity.setProvider(getProvider());
+  }
+
+  /**
+   * Builds an Actor object given a UserInfoData object.
+   * @param item UserInfoData item
+   * @return Actor object
+   */
+  public static ActivityObject buildActor(UserInfoData item) {
+    ActivityObject actor = new ActivityObject();
+
+    try {
+      Image image = new Image();
+      image.setUrl(item.getProfilePicture());
+
+      Counts counts = item.getCounts();
+
+      Map<String, Object> extensions = new HashMap<>();
+
+      extensions.put("followers", counts.getFollowedBy());
+      extensions.put("follows", counts.getFollows());
+      extensions.put("screenName", item.getUsername());
+      extensions.put("posts", counts.getMedia());
+
+      actor.setId(formatId(String.valueOf(item.getId())));
+      actor.setImage(image);
+      actor.setDisplayName(item.getFullName());
+      actor.setSummary(item.getBio());
+      actor.setUrl(item.getWebsite());
+
+      actor.setAdditionalProperty("handle", item.getUsername());
+      actor.setAdditionalProperty("extensions", extensions);
+    } catch (Exception ex) {
+      LOGGER.error("Exception trying to build actor object: {}", ex.getMessage());
     }
 
-    /**
-     * Builds the ActivityObject
-     * @param item the item
-     * @return a valid Activity Object
-     */
-    public static ActivityObject buildActivityObject(MediaFeedData item) {
-        ActivityObject actObj = new ActivityObject();
-
-        actObj.setObjectType(item.getType());
-        actObj.setAttachments(buildActivityObjectAttachments(item));
-
-        Image standardResolution = new Image();
-        if(item.getType().equals("image") && item.getImages() != null) {
-            ImageData standardResolutionData = item.getImages().getStandardResolution();
-            standardResolution.setHeight((long) standardResolutionData.getImageHeight());
-            standardResolution.setWidth((long) standardResolutionData.getImageWidth());
-            standardResolution.setUrl(standardResolutionData.getImageUrl());
-        } else if(item.getType().equals("video") && item.getVideos() != null) {
-            VideoData standardResolutionData = item.getVideos().getStandardResolution();
-            standardResolution.setHeight((long) standardResolutionData.getHeight());
-            standardResolution.setWidth((long) standardResolutionData.getWidth());
-            standardResolution.setUrl(standardResolutionData.getUrl());
-        }
-
-        actObj.setImage(standardResolution);
-
-        return actObj;
+    return actor;
+  }
+
+  /**
+   * Builds the actor.
+   * @param item MediaFeedData item
+   * @return a valid ActivityObject
+   */
+  public static ActivityObject buildActor(MediaFeedData item) {
+    ActivityObject actor = new ActivityObject();
+
+    try {
+      Image image = new Image();
+      image.setUrl(item.getUser().getProfilePictureUrl());
+
+      Map<String, Object> extensions = new HashMap<>();
+      extensions.put("screenName", item.getUser().getUserName());
+
+      actor.setDisplayName(item.getUser().getFullName());
+      actor.setSummary(item.getUser().getBio());
+      actor.setUrl(item.getUser().getWebsiteUrl());
+
+      actor.setId(formatId(String.valueOf(item.getUser().getId())));
+      actor.setImage(image);
+      actor.setAdditionalProperty("extensions", extensions);
+      actor.setAdditionalProperty("handle", item.getUser().getUserName());
+    } catch (Exception ex) {
+      LOGGER.error("Exception trying to build actor object: {}", ex.getMessage());
     }
 
-    /**
-     * Builds all of the attachments associated with a MediaFeedData object
-     *
-     * @param item
-     * @return
-     */
-    public static List<ActivityObject> buildActivityObjectAttachments(MediaFeedData item) {
-        List<ActivityObject> attachments = new ArrayList<>();
-
-        addImageObjects(attachments, item);
-        addVideoObjects(attachments, item);
-
-        return attachments;
+    return actor;
+  }
+
+  /**
+   * Builds the object.
+   * @param item the item
+   * @return a valid Activity Object
+   */
+  public static ActivityObject buildActivityObject(MediaFeedData item) {
+    ActivityObject actObj = new ActivityObject();
+
+    actObj.setObjectType(item.getType());
+    actObj.setAttachments(buildActivityObjectAttachments(item));
+
+    Image standardResolution = new Image();
+    if (item.getType().equals("image") && item.getImages() != null) {
+      ImageData standardResolutionData = item.getImages().getStandardResolution();
+      standardResolution.setHeight((long) standardResolutionData.getImageHeight());
+      standardResolution.setWidth((long) standardResolutionData.getImageWidth());
+      standardResolution.setUrl(standardResolutionData.getImageUrl());
+    } else if (item.getType().equals("video") && item.getVideos() != null) {
+      VideoData standardResolutionData = item.getVideos().getStandardResolution();
+      standardResolution.setHeight((long) standardResolutionData.getHeight());
+      standardResolution.setWidth((long) standardResolutionData.getWidth());
+      standardResolution.setUrl(standardResolutionData.getUrl());
     }
 
-    /**
-     * Adds any image objects to the attachment field
-     * @param attachments
-     * @param item
-     */
-    public static void addImageObjects(List<ActivityObject> attachments, MediaFeedData item) {
-        Images images = item.getImages();
-
-        if(images != null) {
-            try {
-                ImageData thumbnail = images.getThumbnail();
-                ImageData lowResolution = images.getLowResolution();
-
-                ActivityObject thumbnailObject = new ActivityObject();
-                Image thumbnailImage = new Image();
-                thumbnailImage.setUrl(thumbnail.getImageUrl());
-                thumbnailImage.setHeight((long) thumbnail.getImageHeight());
-                thumbnailImage.setWidth((long) thumbnail.getImageWidth());
-                thumbnailObject.setImage(thumbnailImage);
-                thumbnailObject.setObjectType("image");
-
-                ActivityObject lowResolutionObject = new ActivityObject();
-                Image lowResolutionImage = new Image();
-                lowResolutionImage.setUrl(lowResolution.getImageUrl());
-                lowResolutionImage.setHeight((long) lowResolution.getImageHeight());
-                lowResolutionImage.setWidth((long) lowResolution.getImageWidth());
-                lowResolutionObject.setImage(lowResolutionImage);
-                lowResolutionObject.setObjectType("image");
-
-                attachments.add(thumbnailObject);
-                attachments.add(lowResolutionObject);
-            } catch (Exception e) {
-                LOGGER.error("Failed to add image objects: {}", e.getMessage());
-            }
-        }
+    actObj.setImage(standardResolution);
+
+    return actObj;
+  }
+
+  /**
+   * Builds all of the attachments associated with a MediaFeedData object.
+   *
+   * @param item item
+   * @return result
+   */
+  public static List<ActivityObject> buildActivityObjectAttachments(MediaFeedData item) {
+    List<ActivityObject> attachments = new ArrayList<>();
+
+    addImageObjects(attachments, item);
+    addVideoObjects(attachments, item);
+
+    return attachments;
+  }
+
+  /**
+   * Adds any image objects to the attachment field.
+   * @param attachments attachments
+   * @param item item
+   */
+  public static void addImageObjects(List<ActivityObject> attachments, MediaFeedData item) {
+    Images images = item.getImages();
+
+    if (images != null) {
+      try {
+        ImageData thumbnail = images.getThumbnail();
+        ImageData lowResolution = images.getLowResolution();
+
+        ActivityObject thumbnailObject = new ActivityObject();
+        Image thumbnailImage = new Image();
+        thumbnailImage.setUrl(thumbnail.getImageUrl());
+        thumbnailImage.setHeight((long) thumbnail.getImageHeight());
+        thumbnailImage.setWidth((long) thumbnail.getImageWidth());
+        thumbnailObject.setImage(thumbnailImage);
+        thumbnailObject.setObjectType("image");
+
+        ActivityObject lowResolutionObject = new ActivityObject();
+        Image lowResolutionImage = new Image();
+        lowResolutionImage.setUrl(lowResolution.getImageUrl());
+        lowResolutionImage.setHeight((long) lowResolution.getImageHeight());
+        lowResolutionImage.setWidth((long) lowResolution.getImageWidth());
+        lowResolutionObject.setImage(lowResolutionImage);
+        lowResolutionObject.setObjectType("image");
+
+        attachments.add(thumbnailObject);
+        attachments.add(lowResolutionObject);
+      } catch (Exception ex) {
+        LOGGER.error("Failed to add image objects: {}", ex.getMessage());
+      }
     }
-
-    /**
-     * Adds any video objects to the attachment field
-     * @param attachments
-     * @param item
-     */
-    public static void addVideoObjects(List<ActivityObject> attachments, MediaFeedData item) {
-        Videos videos = item.getVideos();
-
-        if(videos != null) {
-            try {
-                VideoData lowResolutionVideo = videos.getLowResolution();
-
-                ActivityObject lowResolutionVideoObject = new ActivityObject();
-                Image lowResolutionVideoImage = new Image();
-                lowResolutionVideoImage.setUrl(lowResolutionVideo.getUrl());
-                lowResolutionVideoImage.setHeight((long) lowResolutionVideo.getHeight());
-                lowResolutionVideoImage.setWidth((long) lowResolutionVideo.getWidth());
-                lowResolutionVideoObject.setImage(lowResolutionVideoImage);
-                lowResolutionVideoObject.setObjectType("video");
-
-                attachments.add(lowResolutionVideoObject);
-            } catch (Exception e) {
-                LOGGER.error("Failed to add video objects: {}", e.getMessage());
-            }
-        }
+  }
+
+  /**
+   * Adds any video objects to the attachment field.
+   * @param attachments attachments
+   * @param item item
+   */
+  public static void addVideoObjects(List<ActivityObject> attachments, MediaFeedData item) {
+    Videos videos = item.getVideos();
+
+    if (videos != null) {
+      try {
+        VideoData lowResolutionVideo = videos.getLowResolution();
+
+        ActivityObject lowResolutionVideoObject = new ActivityObject();
+        Image lowResolutionVideoImage = new Image();
+        lowResolutionVideoImage.setUrl(lowResolutionVideo.getUrl());
+        lowResolutionVideoImage.setHeight((long) lowResolutionVideo.getHeight());
+        lowResolutionVideoImage.setWidth((long) lowResolutionVideo.getWidth());
+        lowResolutionVideoObject.setImage(lowResolutionVideoImage);
+        lowResolutionVideoObject.setObjectType("video");
+
+        attachments.add(lowResolutionVideoObject);
+      } catch (Exception ex) {
+        LOGGER.error("Failed to add video objects: {}", ex.getMessage());
+      }
     }
-
-    /**
-     * Gets the links from the Instagram event
-     * @param item the object to use as the source
-     * @return a list of links corresponding to the expanded URL
-     */
-    public static List<String> getLinks(MediaFeedData item) {
-        return new ArrayList<>();
+  }
+
+  /**
+   * Gets the links from the Instagram event.
+   * @param item the object to use as the source
+   * @return a list of links corresponding to the expanded URL
+   */
+  public static List<String> getLinks(MediaFeedData item) {
+    return new ArrayList<>();
+  }
+
+  /**
+   * Adds the location extension and populates with the instagram data.
+   * @param activity the Activity object to update
+   * @param item the object to use as the source
+   */
+  public static void addLocationExtension(Activity activity, MediaFeedData item) {
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
+
+    if (item.getLocation() != null) {
+      Map<String, Object> coordinates = new HashMap<>();
+      coordinates.put("type", "Point");
+      coordinates.put("coordinates", "[" + item.getLocation().getLongitude() + "," + item.getLocation().getLatitude() + "]");
+
+      extensions.put("coordinates", coordinates);
     }
-
-    /**
-     * Adds the location extension and populates with teh instagram data
-     * @param activity the Activity object to update
-     * @param item the object to use as the source
-     */
-    public static void addLocationExtension(Activity activity, MediaFeedData item) {
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
-
-        if(item.getLocation() != null) {
-            Map<String, Object> coordinates = new HashMap<>();
-            coordinates.put("type", "Point");
-            coordinates.put("coordinates", "[" + item.getLocation().getLongitude() + "," + item.getLocation().getLatitude() + "]");
-
-            extensions.put("coordinates", coordinates);
-        }
+  }
+
+  /**
+   * Gets the common instagram {@link org.apache.streams.pojo.json.Provider} object.
+   * @return a provider object representing Instagram
+   */
+  public static Provider getProvider() {
+    Provider provider = new Provider();
+    provider.setId("id:providers:instagram");
+    provider.setDisplayName("Instagram");
+    return provider;
+  }
+
+  /**
+   * Formats the ID to conform with the Apache Streams activity ID convention.
+   * @param idparts the parts of the ID to join
+   * @return a valid Activity ID in format "id:instagram:part1:part2:...partN"
+   */
+  public static String formatId(String... idparts) {
+    return Joiner.on(":").join(Lists.asList("id:instagram", idparts));
+  }
+
+  /**
+   * Takes various parameters from the instagram object that are currently not part of the
+   * activity schema and stores them in a generic extensions attribute.
+   * @param activity Activity activity
+   * @param item MediaFeedData item
+   */
+  public static void addInstagramExtensions(Activity activity, MediaFeedData item) {
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
+
+    addLocationExtension(activity, item);
+
+    if (item.getLikes() != null) {
+      Map<String, Object> likes = new HashMap<>();
+      likes.put("count", item.getLikes().getCount());
+      extensions.put("likes", likes);
     }
 
-    /**
-     * Gets the common instagram {@link org.apache.streams.pojo.json.Provider} object
-     * @return a provider object representing Instagram
-     */
-    public static Provider getProvider() {
-        Provider provider = new Provider();
-        provider.setId("id:providers:instagram");
-        provider.setDisplayName("Instagram");
-        return provider;
-    }
+    extensions.put("hashtags", item.getTags());
 
-    /**
-     * Formats the ID to conform with the Apache Streams activity ID convention
-     * @param idparts the parts of the ID to join
-     * @return a valid Activity ID in format "id:instagram:part1:part2:...partN"
-     */
-    public static String formatId(String... idparts) {
-        return Joiner.on(":").join(Lists.asList("id:instagram", idparts));
-    }
+    Comments comments = item.getComments();
+    String commentsConcat = "";
 
-    /**
-     * Takes various parameters from the instagram object that are currently not part of teh
-     * activity schema and stores them in a generic extensions attribute
-     * @param activity
-     * @param item
-     */
-    public static void addInstagramExtensions(Activity activity, MediaFeedData item) {
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
-
-        addLocationExtension(activity, item);
-
-        if(item.getLikes() != null) {
-            Map<String, Object> likes = new HashMap<>();
-            likes.put("count", item.getLikes().getCount());
-            extensions.put("likes", likes);
-        }
-
-        extensions.put("hashtags", item.getTags());
-
-        Comments comments = item.getComments();
-        String commentsConcat = "";
-
-        if(comments != null) {
-            for (CommentData commentData : comments.getComments()) {
-                commentsConcat += " " + commentData.getText();
-            }
-        }
-        if(item.getCaption() != null) {
-            commentsConcat += " " + item.getCaption().getText();
-        }
-
-        extensions.put("keywords", commentsConcat);
+    if (comments != null) {
+      for (CommentData commentData : comments.getComments()) {
+        commentsConcat += " " + commentData.getText();
+      }
     }
+    if (item.getCaption() != null) {
+      commentsConcat += " " + item.getCaption().getText();
+    }
+
+    extensions.put("keywords", commentsConcat);
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/data/InstagramMediaFeedDataConverterIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/data/InstagramMediaFeedDataConverterIT.java b/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/data/InstagramMediaFeedDataConverterIT.java
index af99c14..664cd85 100644
--- a/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/data/InstagramMediaFeedDataConverterIT.java
+++ b/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/data/InstagramMediaFeedDataConverterIT.java
@@ -18,86 +18,90 @@
 
 package org.apache.streams.instagram.test.data;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.gson.Gson;
-import java.io.BufferedOutputStream;
-import java.io.BufferedReader;
-import java.io.FileOutputStream;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.PrintStream;
-import org.apache.commons.lang.StringUtils;
 import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.instagram.serializer.InstagramMediaFeedDataConverter;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.gson.Gson;
+import org.apache.commons.lang.StringUtils;
 import org.jinstagram.entity.users.feed.MediaFeedData;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.BufferedOutputStream;
+import java.io.BufferedReader;
+import java.io.FileOutputStream;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.not;
 import static org.hamcrest.CoreMatchers.nullValue;
 import static org.junit.Assert.assertThat;
 
 /**
- * Tests conversion of instagram inputs to Activity
+ * Tests conversion of instagram inputs to Activity.
  */
+
 public class InstagramMediaFeedDataConverterIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(InstagramMediaFeedDataConverterIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramMediaFeedDataConverterIT.class);
 
-    // use gson because jInstagram's pojos do
-    private Gson gson = new Gson();
+  // use gson because jInstagram's pojos do
+  private Gson gson = new Gson();
 
-    // use jackson to write to file output
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  // use jackson to write to file output
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Test
-    public void InstagramMediaFeedDataConverterITCase() throws Exception {
-        InputStream is = InstagramMediaFeedDataConverterIT.class.getResourceAsStream("/testMediaFeedObjects.txt");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
+  @Test
+  public void InstagramMediaFeedDataConverterITCase() throws Exception {
+    InputStream is = InstagramMediaFeedDataConverterIT.class.getResourceAsStream("/testMediaFeedObjects.txt");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
 
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream("target/test-classes/InstagramMediaFeedDataConverterITCase.txt")));
+    PrintStream outStream = new PrintStream(
+        new BufferedOutputStream(
+            new FileOutputStream("target/test-classes/InstagramMediaFeedDataConverterITCase.txt")));
 
-        try {
-            while (br.ready()) {
-                String line = br.readLine();
-                if(!StringUtils.isEmpty(line))
-                {
-                    LOGGER.info("raw: {}", line);
+    try {
+      while (br.ready()) {
+        String line = br.readLine();
+        if (!StringUtils.isEmpty(line)) {
+          LOGGER.info("raw: {}", line);
 
-                    MediaFeedData mediaFeedData = gson.fromJson(line, MediaFeedData.class);
+          MediaFeedData mediaFeedData = gson.fromJson(line, MediaFeedData.class);
 
-                    ActivityConverter<MediaFeedData> converter = new InstagramMediaFeedDataConverter();
+          ActivityConverter<MediaFeedData> converter = new InstagramMediaFeedDataConverter();
 
-                    Activity activity = converter.toActivityList(mediaFeedData).get(0);
+          Activity activity = converter.toActivityList(mediaFeedData).get(0);
 
-                    LOGGER.info("activity: {}", activity.toString());
+          LOGGER.info("activity: {}", activity.toString());
 
-                    assertThat(activity, is(not(nullValue())));
+          assertThat(activity, is(not(nullValue())));
 
-                    assertThat(activity.getId(), is(not(nullValue())));
-                    assertThat(activity.getActor(), is(not(nullValue())));
-                    assertThat(activity.getActor().getId(), is(not(nullValue())));
-                    assertThat(activity.getVerb(), is(not(nullValue())));
-                    assertThat(activity.getProvider(), is(not(nullValue())));
+          assertThat(activity.getId(), is(not(nullValue())));
+          assertThat(activity.getActor(), is(not(nullValue())));
+          assertThat(activity.getActor().getId(), is(not(nullValue())));
+          assertThat(activity.getVerb(), is(not(nullValue())));
+          assertThat(activity.getProvider(), is(not(nullValue())));
 
-                    outStream.println(mapper.writeValueAsString(activity));
+          outStream.println(mapper.writeValueAsString(activity));
 
-                }
+        }
 
-            }
-            outStream.flush();
+      }
+      outStream.flush();
 
-        } catch( Exception e ) {
-            LOGGER.error("Exception: ", e);
-            outStream.flush();
-            Assert.fail();
-        }
+    } catch ( Exception ex ) {
+      LOGGER.error("Exception: ", ex);
+      outStream.flush();
+      Assert.fail();
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/data/InstagramUserInfoDataConverterIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/data/InstagramUserInfoDataConverterIT.java b/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/data/InstagramUserInfoDataConverterIT.java
index dcc5eb1..61035d8 100644
--- a/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/data/InstagramUserInfoDataConverterIT.java
+++ b/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/data/InstagramUserInfoDataConverterIT.java
@@ -18,18 +18,15 @@
 
 package org.apache.streams.instagram.test.data;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.gson.Gson;
-import org.apache.commons.lang.StringUtils;
-import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.data.ActivityObjectConverter;
-import org.apache.streams.instagram.serializer.InstagramMediaFeedDataConverter;
 import org.apache.streams.instagram.serializer.InstagramUserInfoDataConverter;
 import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.gson.Gson;
+import org.apache.commons.lang.StringUtils;
 import org.jinstagram.entity.users.basicinfo.UserInfoData;
-import org.jinstagram.entity.users.feed.MediaFeedData;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -49,71 +46,73 @@ import static org.hamcrest.CoreMatchers.nullValue;
 import static org.junit.Assert.assertThat;
 
 /**
- * Tests conversion of instagram inputs to Activity
+ * Tests conversion of instagram inputs to Activity.
  */
 public class InstagramUserInfoDataConverterIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(InstagramUserInfoDataConverterIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramUserInfoDataConverterIT.class);
 
-    // use gson because jInstagram's pojos do
-    private Gson gson = new Gson();
+  // use gson because jInstagram's pojos do
+  private Gson gson = new Gson();
 
-    // use jackson to write to file output
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  // use jackson to write to file output
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Test
-    public void InstagramUserInfoDataConverterIT() throws Exception {
-        InputStream is = InstagramUserInfoDataConverterIT.class.getResourceAsStream("/testUserInfoData.txt");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
+  @Test
+  public void InstagramUserInfoDataConverterIT() throws Exception {
+    InputStream is = InstagramUserInfoDataConverterIT.class.getResourceAsStream("/testUserInfoData.txt");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
 
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream("target/test-classes/InstagramUserInfoDataConverterIT.txt")));
+    PrintStream outStream = new PrintStream(
+        new BufferedOutputStream(
+            new FileOutputStream("target/test-classes/InstagramUserInfoDataConverterIT.txt")));
 
-        try {
-            while (br.ready()) {
-                String line = br.readLine();
-                if(!StringUtils.isEmpty(line))
-                {
-                    LOGGER.info("raw: {}", line);
+    try {
+      while (br.ready()) {
+        String line = br.readLine();
+        if (!StringUtils.isEmpty(line)) {
 
-                    UserInfoData userInfoData = gson.fromJson(line, UserInfoData.class);
+          LOGGER.info("raw: {}", line);
 
-                    ActivityObjectConverter<UserInfoData> converter = new InstagramUserInfoDataConverter();
+          UserInfoData userInfoData = gson.fromJson(line, UserInfoData.class);
 
-                    ActivityObject activityObject = converter.toActivityObject(userInfoData);
+          ActivityObjectConverter<UserInfoData> converter = new InstagramUserInfoDataConverter();
 
-                    LOGGER.info("activityObject: {}", activityObject.toString());
+          ActivityObject activityObject = converter.toActivityObject(userInfoData);
 
-                    assertThat(activityObject, is(not(nullValue())));
+          LOGGER.info("activityObject: {}", activityObject.toString());
 
-                    assertThat(activityObject.getId(), is(not(nullValue())));
-                    assertThat(activityObject.getImage(), is(not(nullValue())));
-                    assertThat(activityObject.getDisplayName(), is(not(nullValue())));
-                    assertThat(activityObject.getSummary(), is(not(nullValue())));
+          assertThat(activityObject, is(not(nullValue())));
 
-                    Map<String, Object> extensions = (Map<String, Object>)activityObject.getAdditionalProperties().get("extensions");
-                    assertThat(extensions, is(not(nullValue())));
-                    assertThat(extensions.get("following"), is(not(nullValue())));
-                    assertThat(extensions.get("followers"), is(not(nullValue())));
-                    assertThat(extensions.get("screenName"), is(not(nullValue())));
-                    assertThat(extensions.get("posts"), is(not(nullValue())));
+          assertThat(activityObject.getId(), is(not(nullValue())));
+          assertThat(activityObject.getImage(), is(not(nullValue())));
+          assertThat(activityObject.getDisplayName(), is(not(nullValue())));
+          assertThat(activityObject.getSummary(), is(not(nullValue())));
 
-                    assertThat(activityObject.getAdditionalProperties().get("handle"), is(not(nullValue())));
-                    assertThat(activityObject.getId(), is(not(nullValue())));
-                    assertThat(activityObject.getUrl(), is(not(nullValue())));
+          Map<String, Object> extensions = (Map<String, Object>)activityObject.getAdditionalProperties().get("extensions");
+          assertThat(extensions, is(not(nullValue())));
+          assertThat(extensions.get("following"), is(not(nullValue())));
+          assertThat(extensions.get("followers"), is(not(nullValue())));
+          assertThat(extensions.get("screenName"), is(not(nullValue())));
+          assertThat(extensions.get("posts"), is(not(nullValue())));
 
-                    assertThat(activityObject.getAdditionalProperties().get("provider"), is(not(nullValue())));
+          assertThat(activityObject.getAdditionalProperties().get("handle"), is(not(nullValue())));
+          assertThat(activityObject.getId(), is(not(nullValue())));
+          assertThat(activityObject.getUrl(), is(not(nullValue())));
 
-                    outStream.println(mapper.writeValueAsString(activityObject));
+          assertThat(activityObject.getAdditionalProperties().get("provider"), is(not(nullValue())));
 
-                }
-            }
-            outStream.flush();
+          outStream.println(mapper.writeValueAsString(activityObject));
 
-        } catch( Exception e ) {
-            LOGGER.error("Exception: ", e);
-            outStream.flush();
-            Assert.fail();
         }
+      }
+      outStream.flush();
+
+    } catch ( Exception ex ) {
+      LOGGER.error("Exception: ", ex);
+      outStream.flush();
+      Assert.fail();
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/providers/InstagramRecentMediaProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/providers/InstagramRecentMediaProviderIT.java b/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/providers/InstagramRecentMediaProviderIT.java
index ac55e2f..c5a4f77 100644
--- a/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/providers/InstagramRecentMediaProviderIT.java
+++ b/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/providers/InstagramRecentMediaProviderIT.java
@@ -19,6 +19,7 @@
 package org.apache.streams.instagram.test.providers;
 
 import org.apache.streams.instagram.provider.recentmedia.InstagramRecentMediaProvider;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -28,43 +29,43 @@ import java.io.FileReader;
 import java.io.LineNumberReader;
 
 /**
- * Created by sblackmon on 10/12/16.
+ * Integration Test for InstagramRecentMediaProvider.
  */
 public class InstagramRecentMediaProviderIT {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(InstagramRecentMediaProviderIT.class);
-
-    @Test
-    public void testInstagramRecentMediaProvider() throws Exception {
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramRecentMediaProviderIT.class);
 
-        String configfile = "./target/test-classes/InstagramRecentMediaProviderIT.conf";
-        String outfile = "./target/test-classes/InstagramRecentMediaProviderIT.stdout.txt";
+  @Test
+  public void testInstagramRecentMediaProvider() throws Exception {
 
-        String[] args = new String[2];
-        args[0] = configfile;
-        args[1] = outfile;
+    String configfile = "./target/test-classes/InstagramRecentMediaProviderIT.conf";
+    String outfile = "./target/test-classes/InstagramRecentMediaProviderIT.stdout.txt";
 
-        Thread testThread = new Thread((Runnable) () -> {
-            try {
-                InstagramRecentMediaProvider.main(args);
-            } catch( Exception e ) {
-                LOGGER.error("Test Exception!", e);
-            }
-        });
-        testThread.start();
-        testThread.join(60000);
+    String[] args = new String[2];
+    args[0] = configfile;
+    args[1] = outfile;
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    Thread testThread = new Thread((Runnable) () -> {
+      try {
+        InstagramRecentMediaProvider.main(args);
+      } catch ( Exception ex ) {
+        LOGGER.error("Test Exception!", ex);
+      }
+    });
+    testThread.start();
+    testThread.join(60000);
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        while (outCounter.readLine() != null) {
-        }
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        assert (outCounter.getLineNumber() >= 1);
+    while (outCounter.readLine() != null) {
     }
+
+    assert (outCounter.getLineNumber() >= 1);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/providers/InstagramUserInfoProviderIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/providers/InstagramUserInfoProviderIT.java b/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/providers/InstagramUserInfoProviderIT.java
index ec7cd0b..866b254 100644
--- a/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/providers/InstagramUserInfoProviderIT.java
+++ b/streams-contrib/streams-provider-instagram/src/test/java/org/apache/streams/instagram/test/providers/InstagramUserInfoProviderIT.java
@@ -19,6 +19,7 @@
 package org.apache.streams.instagram.test.providers;
 
 import org.apache.streams.instagram.provider.userinfo.InstagramUserInfoProvider;
+
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -32,39 +33,39 @@ import java.io.LineNumberReader;
  */
 public class InstagramUserInfoProviderIT {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(InstagramUserInfoProviderIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(InstagramUserInfoProviderIT.class);
 
-    @Test
-    public void testInstagramUserInfoProvider() throws Exception {
+  @Test
+  public void testInstagramUserInfoProvider() throws Exception {
 
-        String configfile = "./target/test-classes/InstagramUserInfoProviderIT.conf";
-        String outfile = "./target/test-classes/InstagramUserInfoProviderIT.stdout.txt";
+    String configfile = "./target/test-classes/InstagramUserInfoProviderIT.conf";
+    String outfile = "./target/test-classes/InstagramUserInfoProviderIT.stdout.txt";
 
-        String[] args = new String[2];
-        args[0] = configfile;
-        args[1] = outfile;
+    String[] args = new String[2];
+    args[0] = configfile;
+    args[1] = outfile;
 
-        Thread testThread = new Thread((Runnable) () -> {
-            try {
-                InstagramUserInfoProvider.main(args);
-            } catch( Exception e ) {
-                LOGGER.error("Test Exception!", e);
-            }
-        });
-        testThread.start();
-        testThread.join(60000);
+    Thread testThread = new Thread((Runnable) () -> {
+      try {
+        InstagramUserInfoProvider.main(args);
+      } catch ( Exception ex ) {
+        LOGGER.error("Test Exception!", ex);
+      }
+    });
+    testThread.start();
+    testThread.join(60000);
 
-        File out = new File(outfile);
-        assert (out.exists());
-        assert (out.canRead());
-        assert (out.isFile());
+    File out = new File(outfile);
+    assert (out.exists());
+    assert (out.canRead());
+    assert (out.isFile());
 
-        FileReader outReader = new FileReader(out);
-        LineNumberReader outCounter = new LineNumberReader(outReader);
+    FileReader outReader = new FileReader(out);
+    LineNumberReader outCounter = new LineNumberReader(outReader);
 
-        while(outCounter.readLine() != null) {}
+    while (outCounter.readLine() != null) {}
 
-        assert (outCounter.getLineNumber() >= 1);
+    assert (outCounter.getLineNumber() >= 1);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverClient.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverClient.java b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverClient.java
index 05e6120..d9774ed 100644
--- a/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverClient.java
+++ b/streams-contrib/streams-provider-moreover/src/main/java/org/apache/streams/moreover/MoreoverClient.java
@@ -33,78 +33,89 @@ import java.nio.charset.Charset;
 import java.util.Date;
 
 /**
- *
+ * MoreoverClient is a Client for Moreover.
  */
 public class MoreoverClient {
-    private static final Logger logger = LoggerFactory.getLogger(MoreoverClient.class);
+  private static final Logger logger = LoggerFactory.getLogger(MoreoverClient.class);
 
-    private static final String BASE_URL = "http://metabase.moreover.com/api/v10/articles?key=%s&limit=%s&sequence_id=%s";
-    private final String id;
-    private String apiKey;
-    private BigInteger lastSequenceId = BigInteger.ZERO;
-    //testing purpose only
-    public long pullTime;
-    private boolean debug;
+  private static final String BASE_URL = "http://metabase.moreover.com/api/v10/articles?key=%s&limit=%s&sequence_id=%s";
+  private final String id;
+  private String apiKey;
+  private BigInteger lastSequenceId = BigInteger.ZERO;
+  //testing purpose only
+  public long pullTime;
+  private boolean debug;
 
-    public MoreoverClient(String id, String apiKey, String sequence) {
-        logger.info("Constructed new client for id:{} key:{} sequence:{}", id, apiKey, sequence);
-        this.id = id;
-        this.apiKey = apiKey;
-        this.lastSequenceId = new BigInteger(sequence);
-    }
+  /**
+   * MoreoverClient constructor.
+   * @param id id
+   * @param apiKey apiKey
+   * @param sequence sequence
+   */
+  public MoreoverClient(String id, String apiKey, String sequence) {
+    logger.info("Constructed new client for id:{} key:{} sequence:{}", id, apiKey, sequence);
+    this.id = id;
+    this.apiKey = apiKey;
+    this.lastSequenceId = new BigInteger(sequence);
+  }
 
-    public MoreoverResult getArticlesAfter(String sequenceId, int limit) throws IOException {
-        String urlString = String.format(BASE_URL, this.apiKey, limit, sequenceId);
-        logger.debug("Making call to {}", urlString);
-        long start = System.nanoTime();
-        MoreoverResult result = new MoreoverResult(id, getArticles(new URL(urlString)), start, System.nanoTime());
-        if(!result.getMaxSequencedId().equals(BigInteger.ZERO))
-        {
-            this.lastSequenceId = result.getMaxSequencedId();
-            logger.debug("Maximum sequence from last call {}", this.lastSequenceId);
-        }
-        else
-            logger.debug("No maximum sequence returned in last call {}", this.lastSequenceId);
-        return result;
+  /**
+   * get limit ArticlesAfter sequenceId.
+   * @param sequenceId sequenceId
+   * @param limit limit
+   * @return MoreoverResult
+   * @throws IOException IOException
+   */
+  public MoreoverResult getArticlesAfter(String sequenceId, int limit) throws IOException {
+    String urlString = String.format(BASE_URL, this.apiKey, limit, sequenceId);
+    logger.debug("Making call to {}", urlString);
+    long start = System.nanoTime();
+    MoreoverResult result = new MoreoverResult(id, getArticles(new URL(urlString)), start, System.nanoTime());
+    if (!result.getMaxSequencedId().equals(BigInteger.ZERO)) {
+      this.lastSequenceId = result.getMaxSequencedId();
+      logger.debug("Maximum sequence from last call {}", this.lastSequenceId);
+    } else {
+      logger.debug("No maximum sequence returned in last call {}", this.lastSequenceId);
     }
+    return result;
+  }
 
-    public MoreoverResult getNextBatch() throws IOException{
-        logger.debug("Getting next results for {} {} {}", this.id, this.apiKey, this.lastSequenceId);
-        return getArticlesAfter(this.lastSequenceId.toString(), 500);
-    }
+  public MoreoverResult getNextBatch() throws IOException {
+    logger.debug("Getting next results for {} {} {}", this.id, this.apiKey, this.lastSequenceId);
+    return getArticlesAfter(this.lastSequenceId.toString(), 500);
+  }
 
-    private String getArticles2(URL url) throws IOException {
-        HttpURLConnection cn = (HttpURLConnection) url.openConnection();
-        cn.setRequestMethod("GET");
-        cn.addRequestProperty("Content-Type", "text/xml;charset=UTF-8");
-        cn.setDoInput(true);
-        cn.setDoOutput(false);
-        BufferedReader reader = new BufferedReader(new InputStreamReader(cn.getInputStream(), Charset.forName("UTF-8")));
-        String line = null;
-        StringBuilder builder = new StringBuilder();
-        String s = "";
-        String result = new String(s.getBytes(Charset.forName("UTF-8")), Charset.forName("UTF-8"));
-        while((line = reader.readLine()) != null) {
-            result+=line;
-        }
-        pullTime = new Date().getTime();
-        return result;
+  private String getArticles2(URL url) throws IOException {
+    HttpURLConnection cn = (HttpURLConnection) url.openConnection();
+    cn.setRequestMethod("GET");
+    cn.addRequestProperty("Content-Type", "text/xml;charset=UTF-8");
+    cn.setDoInput(true);
+    cn.setDoOutput(false);
+    BufferedReader reader = new BufferedReader(new InputStreamReader(cn.getInputStream(), Charset.forName("UTF-8")));
+    String line = null;
+    StringBuilder builder = new StringBuilder();
+    String result = new String("".getBytes(Charset.forName("UTF-8")), Charset.forName("UTF-8"));
+    while ((line = reader.readLine()) != null) {
+      result += line;
     }
+    pullTime = new Date().getTime();
+    return result;
+  }
 
-    private String getArticles(URL url) throws IOException{
-        HttpURLConnection cn = (HttpURLConnection) url.openConnection();
-        cn.setRequestMethod("GET");
-        cn.addRequestProperty("Content-Type", "text/xml;charset=UTF-8");
-        cn.setDoInput(true);
-        cn.setDoOutput(false);
-        StringWriter writer = new StringWriter();
-        IOUtils.copy(new InputStreamReader(cn.getInputStream(), Charset.forName("UTF-8")), writer);
-        writer.flush();
-        pullTime = new Date().getTime();
+  private String getArticles(URL url) throws IOException {
+    HttpURLConnection cn = (HttpURLConnection) url.openConnection();
+    cn.setRequestMethod("GET");
+    cn.addRequestProperty("Content-Type", "text/xml;charset=UTF-8");
+    cn.setDoInput(true);
+    cn.setDoOutput(false);
+    StringWriter writer = new StringWriter();
+    IOUtils.copy(new InputStreamReader(cn.getInputStream(), Charset.forName("UTF-8")), writer);
+    writer.flush();
+    pullTime = new Date().getTime();
 
-        // added after seeing java.net.SocketException: Too many open files
-        cn.disconnect();
+    // added after seeing java.net.SocketException: Too many open files
+    cn.disconnect();
 
-        return writer.toString();
-    }
+    return writer.toString();
+  }
 }


[35/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/CypherQueryGraphHelper.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/CypherQueryGraphHelper.java b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/CypherQueryGraphHelper.java
index 86ab72f..e322990 100644
--- a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/CypherQueryGraphHelper.java
+++ b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/CypherQueryGraphHelper.java
@@ -18,17 +18,19 @@
 
 package org.apache.streams.graph.neo4j;
 
+import org.apache.streams.data.util.PropertyUtil;
+import org.apache.streams.graph.QueryGraphHelper;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.pojo.json.Activity;
+import org.apache.streams.pojo.json.ActivityObject;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import org.apache.streams.data.util.PropertyUtil;
-import org.apache.streams.graph.QueryGraphHelper;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.pojo.json.ActivityObject;
+
 import org.javatuples.Pair;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -42,178 +44,196 @@ import java.util.Map;
  */
 public class CypherQueryGraphHelper implements QueryGraphHelper {
 
-    private final static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+  private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(Neo4jHttpGraphHelper.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(Neo4jHttpGraphHelper.class);
 
-    public final static String getVertexLongIdStatementTemplate = "MATCH (v) WHERE ID(v) = <id> RETURN v";
-    public final static String getVertexStringIdStatementTemplate = "MATCH (v {id: '<id>'} ) RETURN v";
+  public static final String getVertexLongIdStatementTemplate = "MATCH (v) WHERE ID(v) = <id> RETURN v";
+  public static final String getVertexStringIdStatementTemplate = "MATCH (v {id: '<id>'} ) RETURN v";
 
-    public final static String createVertexStatementTemplate = "MATCH (x {id: '<id>'}) "+
-            "CREATE UNIQUE (v:<type> { props }) "+
-            "ON CREATE SET v <labels> "+
-            "RETURN v";
+  public static final String createVertexStatementTemplate =
+      "MATCH (x {id: '<id>'}) "
+          + "CREATE UNIQUE (v:<type> { props }) "
+          + "ON CREATE SET v <labels> "
+          + "RETURN v";
 
-    public final static String mergeVertexStatementTemplate = "MERGE (v:<type> {id: '<id>'}) "+
-            "ON CREATE SET v <labels>, v = { props }, v.`@timestamp` = timestamp() "+
-            "ON MATCH SET v <labels>, v = { props }, v.`@timestamp` = timestamp() "+
-            "RETURN v";
 
-    public final static String createEdgeStatementTemplate = "MATCH (s:<s_type> {id: '<s_id>'}),(d:<d_type> {id: '<d_id>'}) "+
-            "CREATE UNIQUE (s)-[r:<r_type> <r_props>]->(d) "+
-            "RETURN r";
 
-    public Pair<String, Map<String, Object>> getVertexRequest(String streamsId) {
+  public static final String mergeVertexStatementTemplate =
+      "MERGE (v:<type> {id: '<id>'}) "
+          + "ON CREATE SET v <labels>, v = { props }, v.`@timestamp` = timestamp() "
+          + "ON MATCH SET v <labels>, v = { props }, v.`@timestamp` = timestamp() "
+          + "RETURN v";
 
-        ST getVertex = new ST(getVertexStringIdStatementTemplate);
-        getVertex.add("id", streamsId);
+  public static final String createEdgeStatementTemplate =
+      "MATCH (s:<s_type> {id: '<s_id>'}),(d:<d_type> {id: '<d_id>'}) "
+          + "CREATE UNIQUE (s)-[r:<r_type> <r_props>]->(d) "
+          + "RETURN r";
 
-        Pair<String, Map<String, Object>> queryPlusParameters = new Pair(getVertex.render(), null);
+  /**
+   * getVertexRequest.
+   * @param streamsId streamsId
+   * @return pair (query, parameterMap)
+   */
+  public Pair<String, Map<String, Object>> getVertexRequest(String streamsId) {
 
-        LOGGER.debug("getVertexRequest", queryPlusParameters.toString());
+    ST getVertex = new ST(getVertexStringIdStatementTemplate);
+    getVertex.add("id", streamsId);
 
-        return queryPlusParameters;
-    }
-
-    @Override
-    public Pair<String, Map<String, Object>> getVertexRequest(Long vertexId) {
+    Pair<String, Map<String, Object>> queryPlusParameters = new Pair(getVertex.render(), null);
 
-        ST getVertex = new ST(getVertexLongIdStatementTemplate);
-        getVertex.add("id", vertexId);
+    LOGGER.debug("getVertexRequest", queryPlusParameters.toString());
 
-        Pair<String, Map<String, Object>> queryPlusParameters = new Pair(getVertex.render(), null);
+    return queryPlusParameters;
+  }
 
-        LOGGER.debug("getVertexRequest", queryPlusParameters.toString());
+  /**
+   * getVertexRequest.
+   * @param vertexId numericId
+   * @return pair (query, parameterMap)
+   */
+  @Override
+  public Pair<String, Map<String, Object>> getVertexRequest(Long vertexId) {
 
-        return queryPlusParameters;
+    ST getVertex = new ST(getVertexLongIdStatementTemplate);
+    getVertex.add("id", vertexId);
 
-    }
+    Pair<String, Map<String, Object>> queryPlusParameters = new Pair(getVertex.render(), null);
 
-    public Pair<String, Map<String, Object>> createVertexRequest(ActivityObject activityObject) {
+    LOGGER.debug("getVertexRequest", queryPlusParameters.toString());
 
-        Preconditions.checkNotNull(activityObject.getObjectType());
+    return queryPlusParameters;
 
-        List<String> labels = getLabels(activityObject);
+  }
 
-        ST createVertex = new ST(createVertexStatementTemplate);
-        createVertex.add("id", activityObject.getId());
-        createVertex.add("type", activityObject.getObjectType());
-        if( labels.size() > 0)
-            createVertex.add("labels", Joiner.on(' ').join(labels));
-        String query = createVertex.render();
+  /**
+   * createVertexRequest.
+   * @param activityObject activityObject
+   * @return pair (query, parameterMap)
+   */
+  public Pair<String, Map<String, Object>> createVertexRequest(ActivityObject activityObject) {
 
-        ObjectNode object = MAPPER.convertValue(activityObject, ObjectNode.class);
-        Map<String, Object> props = PropertyUtil.flattenToMap(object, '.');
+    Preconditions.checkNotNull(activityObject.getObjectType());
 
-        Pair<String, Map<String, Object>> queryPlusParameters = new Pair(createVertex.render(), props);
+    List<String> labels = getLabels(activityObject);
 
-        LOGGER.debug("createVertexRequest: ({},{})", query, props);
+    ST createVertex = new ST(createVertexStatementTemplate);
+    createVertex.add("id", activityObject.getId());
+    createVertex.add("type", activityObject.getObjectType());
 
-        return queryPlusParameters;
+    if ( labels.size() > 0 ) {
+      createVertex.add("labels", Joiner.on(' ').join(labels));
     }
 
-    public Pair<String, Map<String, Object>> mergeVertexRequest(ActivityObject activityObject) {
+    String query = createVertex.render();
 
-        Preconditions.checkNotNull(activityObject.getObjectType());
+    ObjectNode object = MAPPER.convertValue(activityObject, ObjectNode.class);
+    Map<String, Object> props = PropertyUtil.flattenToMap(object, '.');
 
-        Pair queryPlusParameters = new Pair(null, Maps.newHashMap());
+    Pair<String, Map<String, Object>> queryPlusParameters = new Pair(createVertex.render(), props);
 
-        List<String> labels = getLabels(activityObject);
+    LOGGER.debug("createVertexRequest: ({},{})", query, props);
 
-        ST mergeVertex = new ST(mergeVertexStatementTemplate);
-        mergeVertex.add("id", activityObject.getId());
-        mergeVertex.add("type", activityObject.getObjectType());
-        if( labels.size() > 0)
-            mergeVertex.add("labels", Joiner.on(' ').join(labels));
-        String query = mergeVertex.render();
+    return queryPlusParameters;
+  }
 
-        ObjectNode object = MAPPER.convertValue(activityObject, ObjectNode.class);
-        Map<String, Object> props = PropertyUtil.flattenToMap(object, '.');
+  /**
+   * mergeVertexRequest.
+   * @param activityObject activityObject
+   * @return pair (query, parameterMap)
+   */
+  public Pair<String, Map<String, Object>> mergeVertexRequest(ActivityObject activityObject) {
 
-        LOGGER.debug("mergeVertexRequest: ({},{})", query, props);
+    Preconditions.checkNotNull(activityObject.getObjectType());
 
-        queryPlusParameters = queryPlusParameters.setAt0(query);
-        queryPlusParameters = queryPlusParameters.setAt1(props);
+    Pair queryPlusParameters = new Pair(null, Maps.newHashMap());
 
-        return queryPlusParameters;
-    }
-
-    public Pair<String, Map<String, Object>> createEdgeRequest(Activity activity) {
-
-        Pair queryPlusParameters = new Pair(null, Maps.newHashMap());
-
-        ObjectNode object = MAPPER.convertValue(activity, ObjectNode.class);
-        Map<String, Object> props = PropertyUtil.flattenToMap(object, '.');
-
-        ST mergeEdge = new ST(createEdgeStatementTemplate);
-        mergeEdge.add("s_id", activity.getActor().getId());
-        mergeEdge.add("s_type", activity.getActor().getObjectType());
-        mergeEdge.add("d_id", activity.getObject().getId());
-        mergeEdge.add("d_type", activity.getObject().getObjectType());
-        mergeEdge.add("r_id", activity.getId());
-        mergeEdge.add("r_type", activity.getVerb());
-        mergeEdge.add("r_props", getPropertyCreater(props));
-
-        // set the activityObject's and extensions null, because their properties don't need to appear on the relationship
-        activity.setActor(null);
-        activity.setObject(null);
-        activity.setTarget(null);
-        activity.getAdditionalProperties().put("extensions", null);
-
-        String statement = mergeEdge.render();
-        queryPlusParameters = queryPlusParameters.setAt0(statement);
-        queryPlusParameters = queryPlusParameters.setAt1(props);
+    List<String> labels = getLabels(activityObject);
 
-        LOGGER.debug("createEdgeRequest: ({},{})", statement, props);
-
-        return queryPlusParameters;
-    }
-
-    public static String getPropertyValueSetter(Map<String, Object> map, String symbol) {
-        StringBuilder builder = new StringBuilder();
-        for( Map.Entry<String, Object> entry : map.entrySet()) {
-            if( entry.getValue() instanceof String ) {
-                String propVal = (String)(entry.getValue());
-                builder.append("," + symbol + ".`" + entry.getKey() + "` = '" + propVal + "'");
-            }
-        }
-        return builder.toString();
+    ST mergeVertex = new ST(mergeVertexStatementTemplate);
+    mergeVertex.add("id", activityObject.getId());
+    mergeVertex.add("type", activityObject.getObjectType());
+    if ( labels.size() > 0 ) {
+      mergeVertex.add("labels", Joiner.on(' ').join(labels));
     }
-
-    public static String getPropertyParamSetter(Map<String, Object> map, String symbol) {
-        StringBuilder builder = new StringBuilder();
-        for( Map.Entry<String, Object> entry : map.entrySet()) {
-            if( entry.getValue() instanceof String ) {
-                String propVal = (String)(entry.getValue());
-                builder.append("," + symbol + ".`" + entry.getKey() + "` = '" + propVal + "'");
-            }
-        }
-        return builder.toString();
+    String query = mergeVertex.render();
+
+    ObjectNode object = MAPPER.convertValue(activityObject, ObjectNode.class);
+    Map<String, Object> props = PropertyUtil.flattenToMap(object, '.');
+
+    LOGGER.debug("mergeVertexRequest: ({},{})", query, props);
+
+    queryPlusParameters = queryPlusParameters.setAt0(query);
+    queryPlusParameters = queryPlusParameters.setAt1(props);
+
+    return queryPlusParameters;
+  }
+
+  /**
+   * createEdgeRequest.
+   * @param activity activity
+   * @return pair (query, parameterMap)
+   */
+  public Pair<String, Map<String, Object>> createEdgeRequest(Activity activity) {
+
+    Pair queryPlusParameters = new Pair(null, Maps.newHashMap());
+
+    ObjectNode object = MAPPER.convertValue(activity, ObjectNode.class);
+    Map<String, Object> props = PropertyUtil.flattenToMap(object, '.');
+
+    ST mergeEdge = new ST(createEdgeStatementTemplate);
+    mergeEdge.add("s_id", activity.getActor().getId());
+    mergeEdge.add("s_type", activity.getActor().getObjectType());
+    mergeEdge.add("d_id", activity.getObject().getId());
+    mergeEdge.add("d_type", activity.getObject().getObjectType());
+    mergeEdge.add("r_id", activity.getId());
+    mergeEdge.add("r_type", activity.getVerb());
+    mergeEdge.add("r_props", getPropertyCreater(props));
+
+    // set the activityObject's and extensions null, because their properties don't need to appear on the relationship
+    activity.setActor(null);
+    activity.setObject(null);
+    activity.setTarget(null);
+    activity.getAdditionalProperties().put("extensions", null);
+
+    String statement = mergeEdge.render();
+    queryPlusParameters = queryPlusParameters.setAt0(statement);
+    queryPlusParameters = queryPlusParameters.setAt1(props);
+
+    LOGGER.debug("createEdgeRequest: ({},{})", statement, props);
+
+    return queryPlusParameters;
+  }
+
+  /**
+   * getPropertyCreater.
+   * @param map paramMap
+   * @return PropertyCreater string
+   */
+  public static String getPropertyCreater(Map<String, Object> map) {
+    StringBuilder builder = new StringBuilder();
+    builder.append("{");
+    List<String> parts = Lists.newArrayList();
+    for ( Map.Entry<String, Object> entry : map.entrySet()) {
+      if ( entry.getValue() instanceof String ) {
+        String propVal = (String) (entry.getValue());
+        parts.add("`" + entry.getKey() + "`:'" + propVal + "'");
+      }
     }
-
-    public static String getPropertyCreater(Map<String, Object> map) {
-        StringBuilder builder = new StringBuilder();
-        builder.append("{");
-        List<String> parts = Lists.newArrayList();
-        for( Map.Entry<String, Object> entry : map.entrySet()) {
-            if( entry.getValue() instanceof String ) {
-                String propVal = (String) (entry.getValue());
-                parts.add("`"+entry.getKey() + "`:'" + propVal + "'");
-            }
-        }
-        builder.append(Joiner.on(",").join(parts));
-        builder.append("}");
-        return builder.toString();
-    }
-
-    private List<String> getLabels(ActivityObject activityObject) {
-        List<String> labels = Lists.newArrayList(":streams");
-        if( activityObject.getAdditionalProperties().containsKey("labels") ) {
-            List<String> extraLabels = (List<String>)activityObject.getAdditionalProperties().get("labels");
-            for( String extraLabel : extraLabels )
-                labels.add(":"+extraLabel);
-        }
-        return labels;
+    builder.append(Joiner.on(",").join(parts));
+    builder.append("}");
+    return builder.toString();
+  }
+
+  private List<String> getLabels(ActivityObject activityObject) {
+    List<String> labels = Lists.newArrayList(":streams");
+    if ( activityObject.getAdditionalProperties().containsKey("labels") ) {
+      List<String> extraLabels = (List<String>)activityObject.getAdditionalProperties().get("labels");
+      for ( String extraLabel : extraLabels ) {
+        labels.add(":" + extraLabel);
+      }
     }
+    return labels;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/Neo4jHttpGraphHelper.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/Neo4jHttpGraphHelper.java b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/Neo4jHttpGraphHelper.java
index 48fe62d..72e668f 100644
--- a/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/Neo4jHttpGraphHelper.java
+++ b/streams-contrib/streams-persist-graph/src/main/java/org/apache/streams/graph/neo4j/Neo4jHttpGraphHelper.java
@@ -18,58 +18,59 @@
 
 package org.apache.streams.graph.neo4j;
 
+import org.apache.streams.graph.HttpGraphHelper;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import org.apache.streams.data.util.PropertyUtil;
-import org.apache.streams.graph.HttpGraphHelper;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.pojo.json.ActivityObject;
+
 import org.javatuples.Pair;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.stringtemplate.v4.ST;
 
-import java.util.List;
 import java.util.Map;
 
 /**
- * Supporting class for interacting with neo4j via rest API
+ * Supporting class for interacting with neo4j via rest API.
  */
 public class Neo4jHttpGraphHelper implements HttpGraphHelper {
 
-    private final static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+  private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(Neo4jHttpGraphHelper.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(Neo4jHttpGraphHelper.class);
 
-    public final static String statementKey = "statement";
-    public final static String paramsKey = "parameters";
-    public final static String propsKey = "props";
+  public static final String statementKey = "statement";
+  public static final String paramsKey = "parameters";
+  public static final String propsKey = "props";
 
-    public ObjectNode createHttpRequest(Pair<String, Map<String, Object>> queryPlusParameters) {
+  /**
+   * createHttpRequest neo4j rest json payload.
+   *
+   * @param queryPlusParameters (query, parameter map)
+   * @return ObjectNode
+   */
+  public ObjectNode createHttpRequest(Pair<String, Map<String, Object>> queryPlusParameters) {
 
-        LOGGER.debug("createHttpRequest: ", queryPlusParameters);
+    LOGGER.debug("createHttpRequest: ", queryPlusParameters);
 
-        Preconditions.checkNotNull(queryPlusParameters);
-        Preconditions.checkNotNull(queryPlusParameters.getValue0());
-        Preconditions.checkNotNull(queryPlusParameters.getValue1());
+    Preconditions.checkNotNull(queryPlusParameters);
+    Preconditions.checkNotNull(queryPlusParameters.getValue0());
+    Preconditions.checkNotNull(queryPlusParameters.getValue1());
 
-        ObjectNode request = MAPPER.createObjectNode();
+    ObjectNode request = MAPPER.createObjectNode();
 
-        request.put(statementKey, queryPlusParameters.getValue0());
+    request.put(statementKey, queryPlusParameters.getValue0());
 
-        ObjectNode params = MAPPER.createObjectNode();
-        ObjectNode props = MAPPER.convertValue(queryPlusParameters.getValue1(), ObjectNode.class);
+    ObjectNode params = MAPPER.createObjectNode();
+    ObjectNode props = MAPPER.convertValue(queryPlusParameters.getValue1(), ObjectNode.class);
 
-        params.put(propsKey, props);
-        request.put(paramsKey, params);
+    params.put(propsKey, props);
+    request.put(paramsKey, params);
 
-        LOGGER.debug("createHttpRequest: ", request);
+    LOGGER.debug("createHttpRequest: ", request);
 
-        return request;
-    }
+    return request;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-graph/src/test/java/org/apache/streams/graph/test/TestCypherQueryGraphHelper.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-graph/src/test/java/org/apache/streams/graph/test/TestCypherQueryGraphHelper.java b/streams-contrib/streams-persist-graph/src/test/java/org/apache/streams/graph/test/TestCypherQueryGraphHelper.java
index 3f889aa..c29c8b7 100644
--- a/streams-contrib/streams-persist-graph/src/test/java/org/apache/streams/graph/test/TestCypherQueryGraphHelper.java
+++ b/streams-contrib/streams-persist-graph/src/test/java/org/apache/streams/graph/test/TestCypherQueryGraphHelper.java
@@ -16,98 +16,101 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.graph.test;
 
 import org.apache.streams.graph.neo4j.CypherQueryGraphHelper;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
+
 import org.javatuples.Pair;
 import org.junit.Test;
 
 import java.util.Map;
 
 /**
- * Created by sblackmon on 6/24/15.
- * TestCypherQueryGraphHelper tests.
+ * @see org.apache.streams.graph.neo4j.CypherQueryGraphHelper
  */
 public class TestCypherQueryGraphHelper {
 
-    CypherQueryGraphHelper helper = new CypherQueryGraphHelper();
+  CypherQueryGraphHelper helper = new CypherQueryGraphHelper();
 
-    @Test
-    public void getVertexRequestIdTest() throws Exception {
+  @Test
+  public void getVertexRequestIdTest() throws Exception {
 
-        Pair<String, Map<String, Object>> queryAndParams = helper.getVertexRequest("id");
-        assert(queryAndParams != null);
-        assert(queryAndParams.getValue0() != null);
+    Pair<String, Map<String, Object>> queryAndParams = helper.getVertexRequest("id");
+    assert(queryAndParams != null);
+    assert(queryAndParams.getValue0() != null);
 
-    }
+  }
 
-    @Test
-    public void getVertexRequestLongTest() throws Exception {
+  @Test
+  public void getVertexRequestLongTest() throws Exception {
 
-        Pair<String, Map<String, Object>> queryAndParams = helper.getVertexRequest(new Long(1));
+    Pair<String, Map<String, Object>> queryAndParams = helper.getVertexRequest(new Long(1));
 
-        assert(queryAndParams != null);
-        assert(queryAndParams.getValue0() != null);
+    assert(queryAndParams != null);
+    assert(queryAndParams.getValue0() != null);
 
-    }
+  }
 
-    @Test
-    public void createVertexRequestTest() throws Exception {
+  @Test
+  public void createVertexRequestTest() throws Exception {
 
-        ActivityObject activityObject = new ActivityObject();
-        activityObject.setId("id");
-        activityObject.setObjectType("type");
-        activityObject.setContent("content");
+    ActivityObject activityObject = new ActivityObject();
+    activityObject.setId("id");
+    activityObject.setObjectType("type");
+    activityObject.setContent("content");
 
-        Pair<String, Map<String, Object>> queryAndParams = helper.createVertexRequest(activityObject);
-        assert(queryAndParams != null);
-        assert(queryAndParams.getValue0() != null);
-        assert(queryAndParams.getValue1() != null);
+    Pair<String, Map<String, Object>> queryAndParams = helper.createVertexRequest(activityObject);
+    assert(queryAndParams != null);
+    assert(queryAndParams.getValue0() != null);
+    assert(queryAndParams.getValue1() != null);
 
-    }
+  }
 
-    @Test
-    public void mergeVertexRequestTest() throws Exception {
+  @Test
+  public void mergeVertexRequestTest() throws Exception {
 
-        ActivityObject activityObject = new ActivityObject();
-        activityObject.setId("id");
-        activityObject.setObjectType("type");
-        activityObject.setContent("content");
+    ActivityObject activityObject = new ActivityObject();
+    activityObject.setId("id");
+    activityObject.setObjectType("type");
+    activityObject.setContent("content");
 
-        Pair<String, Map<String, Object>> queryAndParams = helper.mergeVertexRequest(activityObject);
-        assert(queryAndParams != null);
-        assert(queryAndParams.getValue0() != null);
-        assert(queryAndParams.getValue1() != null);
+    Pair<String, Map<String, Object>> queryAndParams = helper.mergeVertexRequest(activityObject);
+    assert(queryAndParams != null);
+    assert(queryAndParams.getValue0() != null);
+    assert(queryAndParams.getValue1() != null);
 
-    }
+  }
 
-    @Test
-    public void createEdgeRequestTest() throws Exception {
+  @Test
+  public void createEdgeRequestTest() throws Exception {
 
-        ActivityObject actor = new ActivityObject();
-        actor.setId("actor");
-        actor.setObjectType("type");
-        actor.setContent("content");
+    ActivityObject actor = new ActivityObject();
+    actor.setId("actor");
+    actor.setObjectType("type");
+    actor.setContent("content");
 
-        ActivityObject object = new ActivityObject();
-        object.setId("object");
-        object.setObjectType("type");
-        object.setContent("content");
+    ActivityObject object = new ActivityObject();
+    object.setId("object");
+    object.setObjectType("type");
+    object.setContent("content");
 
-        Activity activity = new Activity();
-        activity.setId("activity");
-        activity.setVerb("verb");
-        activity.setContent("content");
+    Activity activity = new Activity();
+    activity.setId("activity");
+    activity.setVerb("verb");
+    activity.setContent("content");
 
-        activity.setActor(actor);
-        activity.setObject(object);
+    activity.setActor(actor);
+    activity.setObject(object);
 
-        Pair<String, Map<String, Object>> queryAndParams = helper.createEdgeRequest(activity);
+    Pair<String, Map<String, Object>> queryAndParams = helper.createEdgeRequest(activity);
 
-        assert(queryAndParams != null);
-        assert(queryAndParams.getValue0() != null);
-        assert(queryAndParams.getValue1() != null);
+    assert(queryAndParams != null);
+    assert(queryAndParams.getValue0() != null);
+    assert(queryAndParams.getValue1() != null);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-graph/src/test/java/org/apache/streams/graph/test/TestNeo4jHttpVertexReader.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-graph/src/test/java/org/apache/streams/graph/test/TestNeo4jHttpVertexReader.java b/streams-contrib/streams-persist-graph/src/test/java/org/apache/streams/graph/test/TestNeo4jHttpVertexReader.java
index eb7ce96..673b402 100644
--- a/streams-contrib/streams-persist-graph/src/test/java/org/apache/streams/graph/test/TestNeo4jHttpVertexReader.java
+++ b/streams-contrib/streams-persist-graph/src/test/java/org/apache/streams/graph/test/TestNeo4jHttpVertexReader.java
@@ -18,14 +18,16 @@
 
 package org.apache.streams.graph.test;
 
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.commons.io.IOUtils;
 import org.apache.streams.graph.GraphHttpConfiguration;
 import org.apache.streams.graph.GraphReaderConfiguration;
 import org.apache.streams.graph.GraphVertexReader;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+import org.apache.commons.io.IOUtils;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -39,43 +41,43 @@ import java.util.List;
  * Unit test for
  * @see {@link org.apache.streams.graph.GraphVertexReader}
  *
- * Test that graph db responses can be converted to streams data
+ * Test that graph db responses can be converted to streams data.
  */
 public class TestNeo4jHttpVertexReader {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(TestNeo4jHttpVertexReader.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TestNeo4jHttpVertexReader.class);
 
-    private final static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    private JsonNode sampleReaderResult;
+  private JsonNode sampleReaderResult;
 
-    private GraphReaderConfiguration testConfiguration;
+  private GraphReaderConfiguration testConfiguration;
 
-    private GraphVertexReader graphPersistReader;
+  private GraphVertexReader graphPersistReader;
 
-    @Before
-    public void prepareTest() throws IOException {
+  @Before
+  public void prepareTest() throws IOException {
 
-        testConfiguration = new GraphReaderConfiguration();
-        testConfiguration.setType(GraphHttpConfiguration.Type.NEO_4_J);
+    testConfiguration = new GraphReaderConfiguration();
+    testConfiguration.setType(GraphHttpConfiguration.Type.NEO_4_J);
 
-        graphPersistReader = new GraphVertexReader(testConfiguration);
-        InputStream testActivityFileStream = TestNeo4jHttpVertexReader.class.getClassLoader()
-                .getResourceAsStream("sampleReaderResult.json");
-        String sampleText = IOUtils.toString(testActivityFileStream, "utf-8");
-        sampleReaderResult = mapper.readValue(sampleText, JsonNode.class);
+    graphPersistReader = new GraphVertexReader(testConfiguration);
+    InputStream testActivityFileStream = TestNeo4jHttpVertexReader.class.getClassLoader()
+        .getResourceAsStream("sampleReaderResult.json");
+    String sampleText = IOUtils.toString(testActivityFileStream, "utf-8");
+    sampleReaderResult = mapper.readValue(sampleText, JsonNode.class);
 
-    }
+  }
 
-    @Test
-    public void testParseNeoResult() throws IOException {
+  @Test
+  public void testParseNeoResult() throws IOException {
 
-        List<ObjectNode> result = graphPersistReader.parse(sampleReaderResult);
+    List<ObjectNode> result = graphPersistReader.parse(sampleReaderResult);
 
-        assert( result.size() == 10);
+    assert( result.size() == 10);
 
-        for( int i = 0 ; i < 10; i++ )
-            assert( result.get(i).get("extensions").size() == 5);
+    for( int i = 0 ; i < 10; i++ )
+      assert( result.get(i).get("extensions").size() == 5);
 
-    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-hbase/src/main/java/org/apache/streams/hbase/HbasePersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-hbase/src/main/java/org/apache/streams/hbase/HbasePersistWriter.java b/streams-contrib/streams-persist-hbase/src/main/java/org/apache/streams/hbase/HbasePersistWriter.java
index 1e066fb..c5a06fc 100644
--- a/streams-contrib/streams-persist-hbase/src/main/java/org/apache/streams/hbase/HbasePersistWriter.java
+++ b/streams-contrib/streams-persist-hbase/src/main/java/org/apache/streams/hbase/HbasePersistWriter.java
@@ -18,13 +18,15 @@
 
 package org.apache.streams.hbase;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsPersistWriter;
+import org.apache.streams.util.GuidUtils;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
-import java.io.Closeable;
-import java.io.Flushable;
-import java.io.IOException;
-import java.util.Queue;
-import java.util.concurrent.ConcurrentLinkedQueue;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.client.HConnection;
@@ -33,187 +35,195 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.HTablePool;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsPersistWriter;
-import org.apache.streams.util.GuidUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class HbasePersistWriter implements StreamsPersistWriter, Flushable, Closeable
-{
-    public final static String STREAMS_ID = "HbasePersistWriter";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(HbasePersistWriter.class);
-
-    protected HConnection connection;
-    protected HTablePool pool;
-    protected HTableInterface table;
-    protected HTableDescriptor descriptor;
-
-    protected volatile Queue<StreamsDatum> persistQueue;
-
-    private ObjectMapper mapper = new ObjectMapper();
-
-    private HbaseConfiguration config;
+import java.io.Closeable;
+import java.io.Flushable;
+import java.io.IOException;
+import java.util.Queue;
+import java.util.concurrent.ConcurrentLinkedQueue;
 
-    public HbasePersistWriter() {
-        this.config = new ComponentConfigurator<>(HbaseConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("hbase"));
-        this.persistQueue  = new ConcurrentLinkedQueue<>();
+/**
+ * HbasePersistWriter writes to hbase.
+ */
+public class HbasePersistWriter implements StreamsPersistWriter, Flushable, Closeable {
+
+  public static final String STREAMS_ID = "HbasePersistWriter";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(HbasePersistWriter.class);
+
+  protected HConnection connection;
+  protected HTablePool pool;
+  protected HTableInterface table;
+  protected HTableDescriptor descriptor;
+
+  protected volatile Queue<StreamsDatum> persistQueue;
+
+  private ObjectMapper mapper = new ObjectMapper();
+
+  private HbaseConfiguration config;
+
+  /**
+   * HbasePersistWriter constructor - resolve HbaseConfiguration from JVM 'hbase'.
+   */
+  public HbasePersistWriter() {
+    this.config = new ComponentConfigurator<>(HbaseConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("hbase"));
+    this.persistQueue  = new ConcurrentLinkedQueue<>();
+  }
+
+  /**
+   * HbasePersistWriter constructor - use supplied persistQueue.
+   * @param persistQueue persistQueue
+   */
+  // TODO: refactor this to use HbaseConfiguration
+  public HbasePersistWriter(Queue<StreamsDatum> persistQueue) {
+    this.config = new ComponentConfigurator<>(HbaseConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("hbase"));
+    this.persistQueue = persistQueue;
+  }
+
+  private synchronized void connectToHbase() {
+
+    // TODO: refactor this to resolve this stuff from typesafe
+    Configuration configuration = new Configuration();
+    configuration.set("hbase.rootdir", config.getRootdir());
+    configuration.set("zookeeper.znode.parent", config.getParent());
+    configuration.set("zookeeper.znode.rootserver", config.getRootserver());
+    //configuration.set("hbase.master", config.getRootserver());
+    //configuration.set("hbase.cluster.distributed", "false");
+    configuration.set("hbase.zookeeper.quorum", config.getQuorum());
+    configuration.set("hbase.zookeeper.property.clientPort", Long.toString(config.getClientPort()));
+    configuration.setInt("zookeeper.session.timeout", 1000);
+
+    configuration.setInt("timeout", 1000);
+
+    //pool = new HTablePool(configuration, 10);
+    try {
+      connection = HConnectionManager.createConnection(configuration);
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      return;
     }
 
-    public HbasePersistWriter(Queue<StreamsDatum> persistQueue) {
-        this.config = new ComponentConfigurator<>(HbaseConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("hbase"));
-        this.persistQueue = persistQueue;
+    try {
+      //    table = new HTable(configuration, config.getTable());
+      //    table = (HTable) pool.getTable(config.getTable());
+      table = new HTable(configuration, config.getTable().getBytes());
+      table.setAutoFlush(true);
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      return;
     }
+    //
 
-    private synchronized void connectToHbase()
-    {
-        Configuration configuration = new Configuration();
-        configuration.set("hbase.rootdir", config.getRootdir());
-        configuration.set("zookeeper.znode.parent", config.getParent());
-        configuration.set("zookeeper.znode.rootserver", config.getRootserver());
-        //configuration.set("hbase.master", config.getRootserver());
-        //configuration.set("hbase.cluster.distributed", "false");
-        configuration.set("hbase.zookeeper.quorum", config.getQuorum());
-        configuration.set("hbase.zookeeper.property.clientPort", Long.toString(config.getClientPort()));
-        configuration.setInt("zookeeper.session.timeout", 1000);
-
-        configuration.setInt("timeout", 1000);
-
-        //pool = new HTablePool(configuration, 10);
-        try {
-            connection = HConnectionManager.createConnection(configuration);
-        } catch (Exception e) {
-            e.printStackTrace();
-            return;
-        }
-
-        try {
-        //    table = new HTable(configuration, config.getTable());
-        //    table = (HTable) pool.getTable(config.getTable());
-            table = new HTable(configuration, config.getTable().getBytes());
-            table.setAutoFlush(true);
-        } catch (Exception e) {
-            e.printStackTrace();
-            return;
-        }
-        //
-
-        try {
-            descriptor = table.getTableDescriptor();
-        } catch (Exception e) {
-            e.printStackTrace();
-            return;
-        }
-
-        try
-        {
-            LOGGER.info("Table : {}", descriptor);
-        }
-        catch (Exception e)
-        {
-            LOGGER.error("There was an error connecting to HBase, please check your settings and try again");
-            e.printStackTrace();
-        }
+    try {
+      descriptor = table.getTableDescriptor();
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      return;
     }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
+    try {
+      LOGGER.info("Table : {}", descriptor);
+    } catch (Exception ex) {
+      LOGGER.error("There was an error connecting to HBase, please check your settings and try again");
+      ex.printStackTrace();
     }
-
-    @Override
-    public void write(StreamsDatum streamsDatum) {
-
-        ObjectNode node;
-        Put put = new Put();
-        if( streamsDatum.getDocument() instanceof String ) {
-            try {
-                node = mapper.readValue((String)streamsDatum.getDocument(), ObjectNode.class);
-            } catch (IOException e) {
-                e.printStackTrace();
-                LOGGER.warn("Invalid json: {}", streamsDatum.getDocument().toString());
-                return;
-            }
-            put.setId(GuidUtils.generateGuid(node.toString()));
-            try {
-                byte[] value = node.binaryValue();
-                put.add(config.getFamily().getBytes(), config.getQualifier().getBytes(), value);
-            } catch (IOException e) {
-                e.printStackTrace();
-                LOGGER.warn("Failure adding object: {}", streamsDatum.getDocument().toString());
-                return;
-            }
-        } else {
-            try {
-                node = mapper.valueToTree(streamsDatum.getDocument());
-            } catch (Exception e) {
-                e.printStackTrace();
-                LOGGER.warn("Invalid json: {}", streamsDatum.getDocument().toString());
-                return;
-            }
-            put.setId(GuidUtils.generateGuid(node.toString()));
-            try {
-                byte[] value = node.binaryValue();
-                put.add(config.getFamily().getBytes(), config.getQualifier().getBytes(), value);
-            } catch (IOException e) {
-                e.printStackTrace();
-                LOGGER.warn("Failure preparing put: {}", streamsDatum.getDocument().toString());
-                return;
-            }
-        }
-        try {
-            table.put(put);
-        } catch (IOException e) {
-            e.printStackTrace();
-            LOGGER.warn("Failure executin put: {}", streamsDatum.getDocument().toString());
-        }
-
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void write(StreamsDatum streamsDatum) {
+
+    ObjectNode node;
+    Put put = new Put();
+    if ( streamsDatum.getDocument() instanceof String ) {
+      try {
+        node = mapper.readValue((String)streamsDatum.getDocument(), ObjectNode.class);
+      } catch (IOException ex) {
+        ex.printStackTrace();
+        LOGGER.warn("Invalid json: {}", streamsDatum.getDocument().toString());
+        return;
+      }
+      put.setId(GuidUtils.generateGuid(node.toString()));
+      try {
+        byte[] value = node.binaryValue();
+        put.add(config.getFamily().getBytes(), config.getQualifier().getBytes(), value);
+      } catch (IOException ex) {
+        ex.printStackTrace();
+        LOGGER.warn("Failure adding object: {}", streamsDatum.getDocument().toString());
+        return;
+      }
+    } else {
+      try {
+        node = mapper.valueToTree(streamsDatum.getDocument());
+      } catch (Exception ex) {
+        ex.printStackTrace();
+        LOGGER.warn("Invalid json: {}", streamsDatum.getDocument().toString());
+        return;
+      }
+      put.setId(GuidUtils.generateGuid(node.toString()));
+      try {
+        byte[] value = node.binaryValue();
+        put.add(config.getFamily().getBytes(), config.getQualifier().getBytes(), value);
+      } catch (IOException ex) {
+        ex.printStackTrace();
+        LOGGER.warn("Failure preparing put: {}", streamsDatum.getDocument().toString());
+        return;
+      }
     }
-
-    public void flush() throws IOException
-    {
-        table.flushCommits();
+    try {
+      table.put(put);
+    } catch (IOException ex) {
+      ex.printStackTrace();
+      LOGGER.warn("Failure executing put: {}", streamsDatum.getDocument().toString());
     }
 
-    public synchronized void close() throws IOException
-    {
-        table.close();
-    }
+  }
+
+  public void flush() throws IOException {
+    table.flushCommits();
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  public synchronized void close() throws IOException {
+    table.close();
+  }
 
-        connectToHbase();
+  @Override
+  public void prepare(Object configurationObject) {
 
-        Thread task = new Thread(new HbasePersistWriterTask(this));
-        task.start();
+    connectToHbase();
 
-        try {
-            task.join();
-        } catch (InterruptedException e) {
-            e.printStackTrace();
-        }
+    Thread task = new Thread(new HbasePersistWriterTask(this));
+    task.start();
 
+    try {
+      task.join();
+    } catch (InterruptedException ex) {
+      ex.printStackTrace();
     }
 
-    @Override
-    public void cleanUp() {
+  }
 
-        try {
-            flush();
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-        try {
-            close();
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
+  @Override
+  public void cleanUp() {
 
+    try {
+      flush();
+    } catch (IOException ex) {
+      ex.printStackTrace();
     }
+    try {
+      close();
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    }
+
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-hbase/src/main/java/org/apache/streams/hbase/HbasePersistWriterTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-hbase/src/main/java/org/apache/streams/hbase/HbasePersistWriterTask.java b/streams-contrib/streams-persist-hbase/src/main/java/org/apache/streams/hbase/HbasePersistWriterTask.java
index 19a398d..eef7004 100644
--- a/streams-contrib/streams-persist-hbase/src/main/java/org/apache/streams/hbase/HbasePersistWriterTask.java
+++ b/streams-contrib/streams-persist-hbase/src/main/java/org/apache/streams/hbase/HbasePersistWriterTask.java
@@ -19,38 +19,45 @@
 package org.apache.streams.hbase;
 
 import org.apache.streams.core.StreamsDatum;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Random;
 
+/**
+ * HbasePersistWriterTask writes to hbase on behalf of
+ * {@link org.apache.streams.hbase.HbasePersistWriter}.
+ */
 public class HbasePersistWriterTask implements Runnable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(HbasePersistWriterTask.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(HbasePersistWriterTask.class);
 
-    private HbasePersistWriter writer;
+  private HbasePersistWriter writer;
 
-    public HbasePersistWriterTask(HbasePersistWriter writer) {
-        this.writer = writer;
-    }
+  public HbasePersistWriterTask(HbasePersistWriter writer) {
+    this.writer = writer;
+  }
 
-    @Override
-    public void run() {
-
-        while(true) {
-            if( writer.persistQueue.peek() != null ) {
-                try {
-                    StreamsDatum entry = writer.persistQueue.remove();
-                    writer.write(entry);
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-            try {
-                Thread.sleep(new Random().nextInt(1));
-            } catch (InterruptedException ignored) {}
-        }
+  @Override
+  public void run() {
 
+    while (true) {
+      if ( writer.persistQueue.peek() != null ) {
+        try {
+          StreamsDatum entry = writer.persistQueue.remove();
+          writer.write(entry);
+        } catch (Exception ex) {
+          ex.printStackTrace();
+        }
+      }
+      try {
+        Thread.sleep(new Random().nextInt(1));
+      } catch (InterruptedException ignored) {
+        LOGGER.trace("ignored InterruptedException", ignored);
+      }
     }
 
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/HdfsConstants.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/HdfsConstants.java b/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/HdfsConstants.java
index e96cc8d..b6660ab 100644
--- a/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/HdfsConstants.java
+++ b/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/HdfsConstants.java
@@ -19,13 +19,13 @@
 package org.apache.streams.hdfs;
 
 /**
- * Predefined field symbols for streams-persist-hdfs
+ * Predefined field symbols for streams-persist-hdfs.
  */
 public class HdfsConstants {
 
-    protected static final String ID = "ID";
-    protected static final String TS = "TS";
-    protected static final String META = "META";
-    protected static final String DOC = "DOC";
+  protected static final String ID = "ID";
+  protected static final String TS = "TS";
+  protected static final String META = "META";
+  protected static final String DOC = "DOC";
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistReader.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistReader.java b/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistReader.java
index 24c9737..d254abb 100644
--- a/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistReader.java
+++ b/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistReader.java
@@ -18,16 +18,6 @@
 
 package org.apache.streams.hdfs;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Queues;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.config.StreamsConfigurator;
@@ -38,6 +28,18 @@ import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistReader;
 import org.apache.streams.core.StreamsResultSet;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Queues;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -56,220 +58,244 @@ import java.util.concurrent.Future;
 import java.util.concurrent.LinkedBlockingQueue;
 
 /**
- * Created by sblackmon on 2/28/14.
+ * WebHdfsPersistReader reads from hdfs.
  */
 public class WebHdfsPersistReader implements StreamsPersistReader, DatumStatusCountable {
 
-    public final static String STREAMS_ID = "WebHdfsPersistReader";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(WebHdfsPersistReader.class);
-
-    protected final static char DELIMITER = '\t';
-
-    protected FileSystem client;
-    protected Path path;
-    protected FileStatus[] status;
-
-    protected volatile Queue<StreamsDatum> persistQueue;
-
-    protected ObjectMapper mapper;
-    protected LineReadWriteUtil lineReaderUtil;
-
-    protected HdfsReaderConfiguration hdfsConfiguration;
-    protected StreamsConfiguration streamsConfiguration;
-
-    private ExecutorService executor;
-
-    protected DatumStatusCounter countersTotal = new DatumStatusCounter();
-    protected DatumStatusCounter countersCurrent = new DatumStatusCounter();
-    private Future<?> task;
-
-    public WebHdfsPersistReader() {
-        this(new ComponentConfigurator<>(HdfsReaderConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig().getConfig("hdfs")));
+  public static final String STREAMS_ID = "WebHdfsPersistReader";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(WebHdfsPersistReader.class);
+
+  protected static final char DELIMITER = '\t';
+
+  protected FileSystem client;
+  protected Path path;
+  protected FileStatus[] status;
+
+  protected volatile Queue<StreamsDatum> persistQueue;
+
+  protected ObjectMapper mapper;
+  protected LineReadWriteUtil lineReaderUtil;
+
+  protected HdfsReaderConfiguration hdfsConfiguration;
+  protected StreamsConfiguration streamsConfiguration;
+
+  private ExecutorService executor;
+
+  protected DatumStatusCounter countersTotal = new DatumStatusCounter();
+  protected DatumStatusCounter countersCurrent = new DatumStatusCounter();
+  private Future<?> task;
+
+  /**
+   * WebHdfsPersistReader constructor - resolves HdfsReaderConfiguration from JVM 'hdfs'.
+   */
+  public WebHdfsPersistReader() {
+    this(new ComponentConfigurator<>(HdfsReaderConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig().getConfig("hdfs")));
+  }
+
+  /**
+   * WebHdfsPersistReader constructor - uses supplied HdfsReaderConfiguration.
+   * @param hdfsConfiguration hdfsConfiguration
+   */
+  public WebHdfsPersistReader(HdfsReaderConfiguration hdfsConfiguration) {
+    this.hdfsConfiguration = hdfsConfiguration;
+  }
+
+  /**
+   * getURI from hdfsConfiguration.
+   * @return URI
+   * @throws URISyntaxException URISyntaxException
+   */
+  public URI getURI() throws URISyntaxException {
+    StringBuilder uriBuilder = new StringBuilder();
+    uriBuilder.append(hdfsConfiguration.getScheme());
+    uriBuilder.append("://");
+    if ( !Strings.isNullOrEmpty(hdfsConfiguration.getHost())) {
+      uriBuilder.append(hdfsConfiguration.getHost());
+      if (hdfsConfiguration.getPort() != null) {
+        uriBuilder.append(":" + hdfsConfiguration.getPort());
+      }
+    } else {
+      uriBuilder.append("/");
     }
-
-    public WebHdfsPersistReader(HdfsReaderConfiguration hdfsConfiguration) {
-        this.hdfsConfiguration = hdfsConfiguration;
-    }
-
-    public URI getURI() throws URISyntaxException {
-        StringBuilder uriBuilder = new StringBuilder();
-        uriBuilder.append(hdfsConfiguration.getScheme());
-        uriBuilder.append("://");
-        if( !Strings.isNullOrEmpty(hdfsConfiguration.getHost())) {
-            uriBuilder.append(hdfsConfiguration.getHost());
-            if (hdfsConfiguration.getPort() != null)
-                uriBuilder.append(":" + hdfsConfiguration.getPort());
-        } else {
-            uriBuilder.append("/");
-        }
-        return new URI(uriBuilder.toString());
+    return new URI(uriBuilder.toString());
+  }
+
+  /**
+   * isConnected.
+   * @return true if connected, false otherwise
+   */
+  public boolean isConnected() {
+    return (client != null);
+  }
+
+  /**
+   * getFileSystem.
+   * @return FileSystem
+   */
+  public final synchronized FileSystem getFileSystem() {
+    // Check to see if we are connected.
+    if (!isConnected()) {
+      connectToWebHDFS();
     }
-
-    public boolean isConnected() 		                { return (client != null); }
-
-    public final synchronized FileSystem getFileSystem()
-    {
-        // Check to see if we are connected.
-        if(!isConnected())
-            connectToWebHDFS();
-        return this.client;
-    }
-
-    private synchronized void connectToWebHDFS()
-    {
-        try
-        {
-            LOGGER.info("User : {}", this.hdfsConfiguration.getUser());
-            UserGroupInformation ugi = UserGroupInformation.createRemoteUser(this.hdfsConfiguration.getUser());
-            ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);
-
-            ugi.doAs(new PrivilegedExceptionAction<Void>() {
-                public Void run() throws Exception {
-                    Configuration conf = new Configuration();
-                    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-                    conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
-                    conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
-                    LOGGER.info("WebURI : {}", getURI().toString());
-                    client = FileSystem.get(getURI(), conf);
-                    LOGGER.info("Connected to WebHDFS");
-
-                    /*
-                    * ************************************************************************************************
-                    * This code is an example of how you would work with HDFS and you weren't going over
-                    * the webHDFS protocol.
-                    *
-                    * Smashew: 2013-10-01
-                    * ************************************************************************************************
-                    conf.set("fs.defaultFS", "hdfs://hadoop.mdigitallife.com:8020/user/" + userName);
-                    conf.set("namenode.host","0.0.0.0");
-                    conf.set("hadoop.job.ugi", userName);
-                    conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "runner");
-                    fileSystem.createNewFile(new Path("/user/"+ userName + "/test"));
-                    FileStatus[] status = fs.listStatus(new Path("/user/" + userName));
-                    for(int i=0;i<status.length;i++)
-                    {
-                        LOGGER.info("Directory: {}", status[i].getPath());
-                    }
-                    */
-                    return null;
-                }
-            });
+    return this.client;
+  }
+
+  // TODO: combine with WebHdfsPersistWriter.connectToWebHDFS
+  private synchronized void connectToWebHDFS() {
+    try {
+      LOGGER.info("User : {}", this.hdfsConfiguration.getUser());
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser(this.hdfsConfiguration.getUser());
+      ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          Configuration conf = new Configuration();
+          conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+          conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
+          conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
+          LOGGER.info("WebURI : {}", getURI().toString());
+          client = FileSystem.get(getURI(), conf);
+          LOGGER.info("Connected to WebHDFS");
+
+          /*
+          * ************************************************************************************************
+          * This code is an example of how you would work with HDFS if you weren't going over
+          * the webHDFS protocol.
+          *
+          * Smashew: 2013-10-01
+          * ************************************************************************************************
+          conf.set("fs.defaultFS", "hdfs://hadoop.mdigitallife.com:8020/user/" + userName);
+          conf.set("namenode.host","0.0.0.0");
+          conf.set("hadoop.job.ugi", userName);
+          conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "runner");
+          fileSystem.createNewFile(new Path("/user/"+ userName + "/test"));
+          FileStatus[] status = fs.listStatus(new Path("/user/" + userName));
+          for(int i=0;i<status.length;i++)
+          {
+              LOGGER.info("Directory: {}", status[i].getPath());
+          }
+          */
+          return null;
         }
-        catch (Exception e)
-        {
-            LOGGER.error("There was an error connecting to WebHDFS, please check your settings and try again");
-            e.printStackTrace();
-        }
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    @Override
-    public void prepare(Object configurationObject) {
-        LOGGER.debug("Prepare");
-        lineReaderUtil = LineReadWriteUtil.getInstance(hdfsConfiguration);
-        connectToWebHDFS();
-        String pathString = hdfsConfiguration.getPath() + "/" + hdfsConfiguration.getReaderPath();
-        LOGGER.info("Path : {}", pathString);
-        path = new Path(pathString);
-        try {
-            if( client.isFile(path)) {
-                LOGGER.info("Found File");
-                FileStatus fileStatus = client.getFileStatus(path);
-                status = new FileStatus[1];
-                status[0] = fileStatus;
-            } else if( client.isDirectory(path)){
-                status = client.listStatus(path);
-                List<FileStatus> statusList = Lists.newArrayList(status);
-                Collections.sort(statusList);
-                status = statusList.toArray(new FileStatus[0]);
-                LOGGER.info("Found Directory : {} files", status.length);
-            } else {
-                LOGGER.error("Neither file nor directory, wtf");
-            }
-        } catch (IOException e) {
-            LOGGER.error("IOException", e);
-        }
-        streamsConfiguration = StreamsConfigurator.detectConfiguration();
-        persistQueue = Queues.synchronizedQueue(new LinkedBlockingQueue<StreamsDatum>(streamsConfiguration.getBatchSize().intValue()));
-        //persistQueue = Queues.synchronizedQueue(new ConcurrentLinkedQueue());
-        executor = Executors.newSingleThreadExecutor();
-        mapper = StreamsJacksonMapper.getInstance();
-    }
-
-    @Override
-    public void cleanUp() {
-
+      });
+    } catch (Exception ex) {
+      LOGGER.error("There was an error connecting to WebHDFS, please check your settings and try again");
+      ex.printStackTrace();
     }
-
-    @Override
-    public StreamsResultSet readAll() {
-        WebHdfsPersistReaderTask readerTask = new WebHdfsPersistReaderTask(this);
-        Thread readerThread = new Thread(readerTask);
-        readerThread.start();
-        try {
-            readerThread.join();
-        } catch (InterruptedException e) {}
-        return new StreamsResultSet(persistQueue);
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    LOGGER.debug("Prepare");
+    lineReaderUtil = LineReadWriteUtil.getInstance(hdfsConfiguration);
+    connectToWebHDFS();
+    String pathString = hdfsConfiguration.getPath() + "/" + hdfsConfiguration.getReaderPath();
+    LOGGER.info("Path : {}", pathString);
+    path = new Path(pathString);
+    try {
+      if ( client.isFile(path)) {
+        LOGGER.info("Found File");
+        FileStatus fileStatus = client.getFileStatus(path);
+        status = new FileStatus[1];
+        status[0] = fileStatus;
+      } else if ( client.isDirectory(path)) {
+        status = client.listStatus(path);
+        List<FileStatus> statusList = Lists.newArrayList(status);
+        Collections.sort(statusList);
+        status = statusList.toArray(new FileStatus[0]);
+        LOGGER.info("Found Directory : {} files", status.length);
+      } else {
+        LOGGER.error("Neither file nor directory, wtf");
+      }
+    } catch (IOException ex) {
+      LOGGER.error("IOException", ex);
     }
-
-    @Override
-    public void startStream() {
-        LOGGER.debug("startStream");
-        task = executor.submit(new WebHdfsPersistReaderTask(this));
+    streamsConfiguration = StreamsConfigurator.detectConfiguration();
+    persistQueue = Queues.synchronizedQueue(new LinkedBlockingQueue<StreamsDatum>(streamsConfiguration.getBatchSize().intValue()));
+    //persistQueue = Queues.synchronizedQueue(new ConcurrentLinkedQueue());
+    executor = Executors.newSingleThreadExecutor();
+    mapper = StreamsJacksonMapper.getInstance();
+  }
+
+  @Override
+  public void cleanUp() {
+
+  }
+
+  @Override
+  public StreamsResultSet readAll() {
+    WebHdfsPersistReaderTask readerTask = new WebHdfsPersistReaderTask(this);
+    Thread readerThread = new Thread(readerTask);
+    readerThread.start();
+    try {
+      readerThread.join();
+    } catch (InterruptedException ignored) {
+      LOGGER.trace("ignored InterruptedException", ignored);
     }
-
-    @Override
-    public StreamsResultSet readCurrent() {
-
-        StreamsResultSet current;
-
-        synchronized( WebHdfsPersistReader.class ) {
-            current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(persistQueue));
-            current.setCounter(new DatumStatusCounter());
-            current.getCounter().add(countersCurrent);
-            countersTotal.add(countersCurrent);
-            countersCurrent = new DatumStatusCounter();
-            persistQueue.clear();
-        }
-
-        return current;
+    return new StreamsResultSet(persistQueue);
+  }
+
+  @Override
+  public void startStream() {
+    LOGGER.debug("startStream");
+    task = executor.submit(new WebHdfsPersistReaderTask(this));
+  }
+
+  @Override
+  public StreamsResultSet readCurrent() {
+
+    StreamsResultSet current;
+
+    synchronized ( WebHdfsPersistReader.class ) {
+      current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(persistQueue));
+      current.setCounter(new DatumStatusCounter());
+      current.getCounter().add(countersCurrent);
+      countersTotal.add(countersCurrent);
+      countersCurrent = new DatumStatusCounter();
+      persistQueue.clear();
     }
 
-    protected void write( StreamsDatum entry ) {
-        boolean success;
-        do {
-            synchronized( WebHdfsPersistReader.class ) {
-                success = persistQueue.offer(entry);
-            }
-            Thread.yield();
-        }
-        while( !success );
-    }
+    return current;
+  }
 
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return null;
+  protected void write( StreamsDatum entry ) {
+    boolean success;
+    do {
+      synchronized ( WebHdfsPersistReader.class ) {
+        success = persistQueue.offer(entry);
+      }
+      Thread.yield();
     }
-
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return null;
+    while ( !success );
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return null;
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return null;
+  }
+
+  @Override
+  public boolean isRunning() {
+    if ( task != null) {
+      return !task.isDone() && !task.isCancelled();
+    } else {
+      return true;
     }
+  }
 
-    @Override
-    public boolean isRunning() {
-        if( task != null)
-            return !task.isDone() && !task.isCancelled();
-        else return true;
-    }
-
-    @Override
-    public DatumStatusCounter getDatumStatusCounter() {
-        return countersTotal;
-    }
+  @Override
+  public DatumStatusCounter getDatumStatusCounter() {
+    return countersTotal;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistReaderTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistReaderTask.java b/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistReaderTask.java
index c5c1ffe..5bff080 100644
--- a/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistReaderTask.java
+++ b/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistReaderTask.java
@@ -18,98 +18,100 @@
 
 package org.apache.streams.hdfs;
 
-import com.fasterxml.jackson.databind.JsonNode;
+import org.apache.streams.core.DatumStatus;
+import org.apache.streams.core.StreamsDatum;
+
 import com.google.common.base.Strings;
-import com.google.common.collect.Maps;
 import com.google.common.util.concurrent.Uninterruptibles;
+
 import org.apache.hadoop.fs.FileStatus;
-import org.apache.streams.converter.LineReadWriteUtil;
-import org.apache.streams.core.DatumStatus;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.BufferedReader;
-import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import java.util.zip.GZIPInputStream;
 
+/**
+ * WebHdfsPersistReaderTask reads from hdfs on behalf of
+ * @see org.apache.streams.hdfs.WebHdfsPersistReader
+ */
 public class WebHdfsPersistReaderTask implements Runnable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(WebHdfsPersistReaderTask.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(WebHdfsPersistReaderTask.class);
 
-    private WebHdfsPersistReader reader;
+  private WebHdfsPersistReader reader;
 
-    public WebHdfsPersistReaderTask(WebHdfsPersistReader reader) {
-        this.reader = reader;
-    }
+  public WebHdfsPersistReaderTask(WebHdfsPersistReader reader) {
+    this.reader = reader;
+  }
+
+  @Override
+  public void run() {
 
-    @Override
-    public void run() {
+    LOGGER.info("WebHdfsPersistReaderTask: files to process");
 
-        LOGGER.info("WebHdfsPersistReaderTask: files to process");
+    for ( FileStatus fileStatus : reader.status ) {
+      LOGGER.info("    " + fileStatus.getPath().getName());
+    }
 
-        for( FileStatus fileStatus : reader.status ) {
-            LOGGER.info("    " + fileStatus.getPath().getName());
+    for ( FileStatus fileStatus : reader.status ) {
+      InputStream inputStream;
+      InputStreamReader inputStreamReader;
+      BufferedReader bufferedReader;
+      if ( fileStatus.isFile() && !fileStatus.getPath().getName().startsWith("_")) {
+        HdfsWriterConfiguration.Compression compression = HdfsWriterConfiguration.Compression.NONE;
+        if ( fileStatus.getPath().getName().endsWith(".gz")) {
+          compression = HdfsWriterConfiguration.Compression.GZIP;
+        }
+        LOGGER.info("Started Processing: {} Encoding: {} Compression: {}", fileStatus.getPath().getName(), reader.hdfsConfiguration.getEncoding(), compression.toString());
+        try {
+          inputStream = reader.client.open(fileStatus.getPath());
+          if ( compression.equals(HdfsWriterConfiguration.Compression.GZIP)) {
+            inputStream = new GZIPInputStream(inputStream);
+          }
+          inputStreamReader = new InputStreamReader(inputStream, reader.hdfsConfiguration.getEncoding());
+          bufferedReader = new BufferedReader(inputStreamReader);
+        } catch (Exception ex) {
+          LOGGER.error("Exception Opening " + fileStatus.getPath(), ex.getMessage());
+          return;
         }
 
-        for( FileStatus fileStatus : reader.status ) {
-            InputStream inputStream;
-            InputStreamReader inputStreamReader;
-            BufferedReader bufferedReader;
-            if( fileStatus.isFile() && !fileStatus.getPath().getName().startsWith("_")) {
-                HdfsWriterConfiguration.Compression compression = HdfsWriterConfiguration.Compression.NONE;
-                if( fileStatus.getPath().getName().endsWith(".gz"))
-                    compression = HdfsWriterConfiguration.Compression.GZIP;
-                LOGGER.info("Started Processing: {} Encoding: {} Compression: {}", fileStatus.getPath().getName(), reader.hdfsConfiguration.getEncoding(), compression.toString());
-                try {
-                    inputStream = reader.client.open(fileStatus.getPath());
-                    if( compression.equals(HdfsWriterConfiguration.Compression.GZIP))
-                        inputStream = new GZIPInputStream(inputStream);
-                    inputStreamReader = new InputStreamReader(inputStream, reader.hdfsConfiguration.getEncoding());
-                    bufferedReader = new BufferedReader(inputStreamReader);
-                } catch (Exception e) {
-                    LOGGER.error("Exception Opening " + fileStatus.getPath(), e.getMessage());
-                    return;
-                }
-
-                String line = "";
-                do{
-                    try {
-                        line = bufferedReader.readLine();
-                        if( !Strings.isNullOrEmpty(line) ) {
-                            reader.countersCurrent.incrementAttempt();
-                            StreamsDatum entry = reader.lineReaderUtil.processLine(line);
-                            if( entry != null ) {
-                                reader.write(entry);
-                                reader.countersCurrent.incrementStatus(DatumStatus.SUCCESS);
-                            } else {
-                                LOGGER.warn("processLine failed");
-                                reader.countersCurrent.incrementStatus(DatumStatus.FAIL);
-                            }
-                        }
-                    } catch (Exception e) {
-                        LOGGER.warn("WebHdfsPersistReader readLine Exception: {}", e);
-                        reader.countersCurrent.incrementStatus(DatumStatus.FAIL);
-                    }
-                } while( !Strings.isNullOrEmpty(line) );
-                LOGGER.info("Finished Processing " + fileStatus.getPath().getName());
-                try {
-                    bufferedReader.close();
-                } catch (Exception e) {
-                    LOGGER.error("WebHdfsPersistReader Exception: {}", e);
-                }
+        String line = "";
+        do {
+          try {
+            line = bufferedReader.readLine();
+            if ( !Strings.isNullOrEmpty(line) ) {
+              reader.countersCurrent.incrementAttempt();
+              StreamsDatum entry = reader.lineReaderUtil.processLine(line);
+              if ( entry != null ) {
+                reader.write(entry);
+                reader.countersCurrent.incrementStatus(DatumStatus.SUCCESS);
+              } else {
+                LOGGER.warn("processLine failed");
+                reader.countersCurrent.incrementStatus(DatumStatus.FAIL);
+              }
             }
+          } catch (Exception ex) {
+            LOGGER.warn("WebHdfsPersistReader readLine Exception: {}", ex);
+            reader.countersCurrent.incrementStatus(DatumStatus.FAIL);
+          }
         }
+        while ( !Strings.isNullOrEmpty(line) );
+        LOGGER.info("Finished Processing " + fileStatus.getPath().getName());
+        try {
+          bufferedReader.close();
+        } catch (Exception ex) {
+          LOGGER.error("WebHdfsPersistReader Exception: {}", ex);
+        }
+      }
+    }
 
-        LOGGER.info("WebHdfsPersistReaderTask Finished");
+    LOGGER.info("WebHdfsPersistReaderTask Finished");
 
-        Uninterruptibles.sleepUninterruptibly(15, TimeUnit.SECONDS);
-    }
+    Uninterruptibles.sleepUninterruptibly(15, TimeUnit.SECONDS);
+  }
 
 }


[34/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistWriter.java b/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistWriter.java
index 492eccb..4554c0f 100644
--- a/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistWriter.java
+++ b/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistWriter.java
@@ -18,13 +18,6 @@
 
 package org.apache.streams.hdfs;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Strings;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.converter.LineReadWriteUtil;
@@ -34,6 +27,15 @@ import org.apache.streams.core.DatumStatusCounter;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistWriter;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Strings;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -50,249 +52,278 @@ import java.util.List;
 import java.util.Queue;
 import java.util.zip.GZIPOutputStream;
 
+/**
+ * WebHdfsPersistWriter writes to hdfs.
+ */
 public class WebHdfsPersistWriter implements StreamsPersistWriter, Flushable, Closeable, DatumStatusCountable {
 
-    public final static String STREAMS_ID = "WebHdfsPersistWriter";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(WebHdfsPersistWriter.class);
-
-    private FileSystem client;
-    private Path path;
-    private int linesPerFile;
-    private int totalRecordsWritten = 0;
-    private final List<Path> writtenFiles = new ArrayList<Path>();
-    private int fileLineCounter = 0;
-    private OutputStreamWriter currentWriter = null;
-
-    private static final int BYTES_IN_MB = 1024 * 1024;
-    private static final int BYTES_BEFORE_FLUSH = 64 * BYTES_IN_MB;
-    private volatile int totalByteCount = 0;
-    private volatile int byteCount = 0;
-
-    public boolean terminate = false;
-
-    protected volatile Queue<StreamsDatum> persistQueue;
-
-    private ObjectMapper mapper;
-    private LineReadWriteUtil lineWriterUtil;
-
-    protected HdfsWriterConfiguration hdfsConfiguration;
-
-    public WebHdfsPersistWriter() {
-        this(new ComponentConfigurator<>(HdfsWriterConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig().getConfig("hdfs")));
-    }
-
-    public WebHdfsPersistWriter(HdfsWriterConfiguration hdfsConfiguration) {
-        this.hdfsConfiguration = hdfsConfiguration;
-        this.linesPerFile = hdfsConfiguration.getLinesPerFile().intValue();
+  public static final String STREAMS_ID = "WebHdfsPersistWriter";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(WebHdfsPersistWriter.class);
+
+  private FileSystem client;
+  private Path path;
+  private int linesPerFile;
+  private int totalRecordsWritten = 0;
+  private final List<Path> writtenFiles = new ArrayList<Path>();
+  private int fileLineCounter = 0;
+  private OutputStreamWriter currentWriter = null;
+
+  private static final int BYTES_IN_MB = 1024 * 1024;
+  private static final int BYTES_BEFORE_FLUSH = 64 * BYTES_IN_MB;
+  private volatile int totalByteCount = 0;
+  private volatile int byteCount = 0;
+
+  public boolean terminate = false;
+
+  protected volatile Queue<StreamsDatum> persistQueue;
+
+  private ObjectMapper mapper;
+  private LineReadWriteUtil lineWriterUtil;
+
+  protected HdfsWriterConfiguration hdfsConfiguration;
+
+  public WebHdfsPersistWriter() {
+    this(new ComponentConfigurator<>(HdfsWriterConfiguration.class).detectConfiguration(StreamsConfigurator.getConfig().getConfig("hdfs")));
+  }
+
+  public WebHdfsPersistWriter(HdfsWriterConfiguration hdfsConfiguration) {
+    this.hdfsConfiguration = hdfsConfiguration;
+    this.linesPerFile = hdfsConfiguration.getLinesPerFile().intValue();
+  }
+
+  /**
+   * getURI from hdfsConfiguration.
+   * @return URI
+   * @throws URISyntaxException URISyntaxException
+   */
+  // TODO: combine with WebHdfsPersistReader.getURI
+  public URI getURI() throws URISyntaxException {
+    StringBuilder uriBuilder = new StringBuilder();
+    uriBuilder.append(hdfsConfiguration.getScheme());
+    uriBuilder.append("://");
+    if ( !Strings.isNullOrEmpty(hdfsConfiguration.getHost())) {
+      uriBuilder.append(hdfsConfiguration.getHost() + ":" + hdfsConfiguration.getPort());
+    } else {
+      uriBuilder.append("/");
     }
-
-    public URI getURI() throws URISyntaxException {
-        StringBuilder uriBuilder = new StringBuilder();
-        uriBuilder.append(hdfsConfiguration.getScheme());
-        uriBuilder.append("://");
-        if( !Strings.isNullOrEmpty(hdfsConfiguration.getHost()))
-            uriBuilder.append(hdfsConfiguration.getHost() + ":" + hdfsConfiguration.getPort());
-        else
-            uriBuilder.append("/");
-        return new URI(uriBuilder.toString());
+    return new URI(uriBuilder.toString());
+  }
+
+  /**
+   * isConnected.
+   * @return true if connected, false otherwise
+   */
+  // TODO: combine with WebHdfsPersistReader.isConnected
+  public boolean isConnected() {
+    return (client != null);
+  }
+
+  /**
+   * getFileSystem.
+   * @return FileSystem
+   */
+  // TODO: combine with WebHdfsPersistReader.getFileSystem
+  public final synchronized FileSystem getFileSystem() {
+    // Check to see if we are connected.
+    if (!isConnected()) {
+      connectToWebHDFS();
     }
-
-    public boolean isConnected() {
-        return (client != null);
-    }
-
-    public final synchronized FileSystem getFileSystem() {
-        // Check to see if we are connected.
-        if (!isConnected())
-            connectToWebHDFS();
-        return this.client;
-    }
-
-    private synchronized void connectToWebHDFS() {
-        try {
-            LOGGER.info("User : {}", this.hdfsConfiguration.getUser());
-            UserGroupInformation ugi = UserGroupInformation.createRemoteUser(this.hdfsConfiguration.getUser());
-            ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);
-
-            ugi.doAs(new PrivilegedExceptionAction<Void>() {
-                public Void run() throws Exception {
-                    Configuration conf = new Configuration();
-                    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-                    LOGGER.info("WebURI : {}", getURI().toString());
-                    client = FileSystem.get(getURI(), conf);
-                    LOGGER.info("Connected to WebHDFS");
-
-                    /*
-                    * ************************************************************************************************
-                    * This code is an example of how you would work with HDFS and you weren't going over
-                    * the webHDFS protocol.
-                    *
-                    * Smashew: 2013-10-01
-                    * ************************************************************************************************
-                    conf.set("fs.defaultFS", "hdfs://hadoop.mdigitallife.com:8020/user/" + userName);
-                    conf.set("namenode.host","0.0.0.0");
-                    conf.set("hadoop.job.ugi", userName);
-                    conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "runner");
-                    fileSystem.createNewFile(new Path("/user/"+ userName + "/test"));
-                    FileStatus[] status = fs.listStatus(new Path("/user/" + userName));
-                    for(int i=0;i<status.length;i++)
-                    {
-                        LOGGER.info("Directory: {}", status[i].getPath());
-                    }
-                    */
-                    return null;
-                }
-            });
-        } catch (Exception e) {
-            LOGGER.error("There was an error connecting to WebHDFS, please check your settings and try again", e);
-            throw new RuntimeException(e);
+    return this.client;
+  }
+
+  private synchronized void connectToWebHDFS() {
+    try {
+      LOGGER.info("User : {}", this.hdfsConfiguration.getUser());
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser(this.hdfsConfiguration.getUser());
+      ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+        public Void run() throws Exception {
+          Configuration conf = new Configuration();
+          conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+          LOGGER.info("WebURI : {}", getURI().toString());
+          client = FileSystem.get(getURI(), conf);
+          LOGGER.info("Connected to WebHDFS");
+
+          /*
+          * ************************************************************************************************
+          * This code is an example of how you would work with HDFS and you weren't going over
+          * the webHDFS protocol.
+          *
+          * Smashew: 2013-10-01
+          * ************************************************************************************************
+          conf.set("fs.defaultFS", "hdfs://hadoop.mdigitallife.com:8020/user/" + userName);
+          conf.set("namenode.host","0.0.0.0");
+          conf.set("hadoop.job.ugi", userName);
+          conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "runner");
+          fileSystem.createNewFile(new Path("/user/"+ userName + "/test"));
+          FileStatus[] status = fs.listStatus(new Path("/user/" + userName));
+          for(int i=0;i<status.length;i++)
+          {
+              LOGGER.info("Directory: {}", status[i].getPath());
+          }
+          */
+
+          return null;
         }
+      });
+    } catch (Exception ex) {
+      LOGGER.error("There was an error connecting to WebHDFS, please check your settings and try again", ex);
+      throw new RuntimeException(ex);
     }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    @Override
-    public void write(StreamsDatum streamsDatum) {
-
-        synchronized (this) {
-            // Check to see if we need to reset the file that we are currently working with
-            if (this.currentWriter == null || (this.fileLineCounter > this.linesPerFile))
-                resetFile();
-
-            String line = lineWriterUtil.convertResultToString(streamsDatum);
-            writeInternal(line);
-            if( !line.endsWith(this.hdfsConfiguration.getLineDelimiter()))
-                writeInternal(this.hdfsConfiguration.getLineDelimiter());
-            int bytesInLine = line.getBytes().length;
-
-            totalRecordsWritten++;
-            totalByteCount += bytesInLine;
-            byteCount += bytesInLine;
-
-            if (byteCount > BYTES_BEFORE_FLUSH)
-                try {
-                    flush();
-                } catch (IOException e) {
-                    LOGGER.warn("Error flushing to HDFS. Creating a new file and continuing execution.  WARNING: There could be data loss.", e);
-                }
-
-            this.fileLineCounter++;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void write(StreamsDatum streamsDatum) {
+
+    synchronized (this) {
+      // Check to see if we need to reset the file that we are currently working with
+      if (this.currentWriter == null || (this.fileLineCounter > this.linesPerFile)) {
+        resetFile();
+      }
+      String line = lineWriterUtil.convertResultToString(streamsDatum);
+      writeInternal(line);
+      if ( !line.endsWith(this.hdfsConfiguration.getLineDelimiter())) {
+        writeInternal(this.hdfsConfiguration.getLineDelimiter());
+      }
+      int bytesInLine = line.getBytes().length;
+
+      totalRecordsWritten++;
+      totalByteCount += bytesInLine;
+      byteCount += bytesInLine;
+
+      if (byteCount > BYTES_BEFORE_FLUSH) {
+        try {
+          flush();
+        } catch (IOException ex) {
+          LOGGER.warn("Error flushing to HDFS. Creating a new file and continuing execution.  WARNING: There could be data loss.", ex);
         }
+      }
+      this.fileLineCounter++;
     }
-
-    private void writeInternal(String line) {
+  }
+
+  private void writeInternal(String line) {
+    try {
+      this.currentWriter.write(line);
+    } catch (IOException ex) {
+      LOGGER.warn("Error writing to HDFS.  Attempting to try a new file", ex);
+      try {
+        resetFile();
+        this.currentWriter.write(line);
+      } catch (Exception e2) {
+        LOGGER.warn("Failed to write even after creating a new file.  Attempting to reconnect", e2);
         try {
-            this.currentWriter.write(line);
-        } catch (IOException e) {
-            LOGGER.warn("Error writing to HDFS.  Attempting to try a new file", e);
-            try{
-                resetFile();
-                this.currentWriter.write(line);
-            } catch (Exception io) {
-                LOGGER.warn("Failed to write even after creating a new file.  Attempting to reconnect", io);
-                try {
-                    connectToWebHDFS();
-                    resetFile();
-                    this.currentWriter.write(line);
-                } catch (Exception ex) {
-                    LOGGER.error("Failed to write to HDFS after reconnecting client. Terminating writer.", ex);
-                    throw new RuntimeException(e);
-                }
-            }
-
+          connectToWebHDFS();
+          resetFile();
+          this.currentWriter.write(line);
+        } catch (Exception e3) {
+          LOGGER.error("Failed to write to HDFS after reconnecting client. Terminating writer.", e3);
+          throw new RuntimeException(e3);
         }
-    }
+      }
 
-    public void flush() throws IOException {
-        if (this.currentWriter != null && byteCount > BYTES_BEFORE_FLUSH) {
-            this.currentWriter.flush();
-            byteCount = 0;
-        }
     }
+  }
 
-    private synchronized void resetFile() {
-        // this will keep it thread safe, so we don't create too many files
-        if (this.fileLineCounter == 0 && this.currentWriter != null)
-            return;
+  @Override
+  public void flush() throws IOException {
+    if (this.currentWriter != null && byteCount > BYTES_BEFORE_FLUSH) {
+      this.currentWriter.flush();
+      byteCount = 0;
+    }
+  }
 
-        // Create the path for where the file is going to live.
-        Path filePath = this.path.suffix("/" + hdfsConfiguration.getWriterFilePrefix() + "-" + new Date().getTime());
+  private synchronized void resetFile() {
+    // this will keep it thread safe, so we don't create too many files
+    if (this.fileLineCounter == 0 && this.currentWriter != null) {
+      return;
+    }
 
-        if( hdfsConfiguration.getCompression().equals(HdfsWriterConfiguration.Compression.GZIP))
-            filePath = filePath.suffix(".gz");
-        else
-            filePath = filePath.suffix(".tsv");
+    // Create the path for where the file is going to live.
+    Path filePath = this.path.suffix("/" + hdfsConfiguration.getWriterFilePrefix() + "-" + new Date().getTime());
 
-        try {
+    if ( hdfsConfiguration.getCompression().equals(HdfsWriterConfiguration.Compression.GZIP)) {
+      filePath = filePath.suffix(".gz");
+    } else {
+      filePath = filePath.suffix(".tsv");
+    }
 
-            // if there is a current writer, we must close it first.
-            if (this.currentWriter != null) {
-                flush();
-                close();
-            }
+    try {
 
-            this.fileLineCounter = 0;
+      // if there is a current writer, we must close it first.
+      if (this.currentWriter != null) {
+        flush();
+        close();
+      }
 
-            // Check to see if a file of the same name exists, if it does, then we are not going to be able to proceed.
-            if (client.exists(filePath))
-                throw new RuntimeException("Unable to create file: " + filePath);
+      this.fileLineCounter = 0;
 
-            if( hdfsConfiguration.getCompression().equals(HdfsWriterConfiguration.Compression.GZIP))
-                this.currentWriter = new OutputStreamWriter(new GZIPOutputStream(client.create(filePath)));
-            else
-                this.currentWriter = new OutputStreamWriter(client.create(filePath));
+      // Check to see if a file of the same name exists, if it does, then we are not going to be able to proceed.
+      if (client.exists(filePath)) {
+        throw new RuntimeException("Unable to create file: " + filePath);
+      }
 
-            // Add another file to the list of written files.
-            writtenFiles.add(filePath);
+      if ( hdfsConfiguration.getCompression().equals(HdfsWriterConfiguration.Compression.GZIP)) {
+        this.currentWriter = new OutputStreamWriter(new GZIPOutputStream(client.create(filePath)));
+      } else {
+        this.currentWriter = new OutputStreamWriter(client.create(filePath));
+      }
 
-            LOGGER.info("File Created: {}", filePath);
-        } catch (Exception e) {
-            LOGGER.error("COULD NOT CreateFile: {}", filePath);
-            LOGGER.error(e.getMessage());
-            throw new RuntimeException(e);
-        }
-    }
+      // Add another file to the list of written files.
+      writtenFiles.add(filePath);
 
-    public synchronized void close() throws IOException {
-        if (this.currentWriter != null) {
-            this.currentWriter.flush();
-            this.currentWriter.close();
-            this.currentWriter = null;
-            LOGGER.info("File Closed");
-        }
+      LOGGER.info("File Created: {}", filePath);
+    } catch (Exception ex) {
+      LOGGER.error("COULD NOT CreateFile: {}", filePath);
+      LOGGER.error(ex.getMessage());
+      throw new RuntimeException(ex);
     }
-
-    @Override
-    public void prepare(Object configurationObject) {
-        mapper = StreamsJacksonMapper.getInstance();
-        lineWriterUtil = LineReadWriteUtil.getInstance(hdfsConfiguration);
-        connectToWebHDFS();
-        path = new Path(hdfsConfiguration.getPath() + "/" + hdfsConfiguration.getWriterPath());
+  }
+
+  @Override
+  public synchronized void close() throws IOException {
+    if (this.currentWriter != null) {
+      this.currentWriter.flush();
+      this.currentWriter.close();
+      this.currentWriter = null;
+      LOGGER.info("File Closed");
     }
-
-    @Override
-    public void cleanUp() {
-        try {
-            flush();
-        } catch (IOException e) {
-            LOGGER.error("Error flushing on cleanup", e);
-        }
-        try {
-            close();
-        } catch (IOException e) {
-            LOGGER.error("Error closing on cleanup", e);
-        }
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    mapper = StreamsJacksonMapper.getInstance();
+    lineWriterUtil = LineReadWriteUtil.getInstance(hdfsConfiguration);
+    connectToWebHDFS();
+    path = new Path(hdfsConfiguration.getPath() + "/" + hdfsConfiguration.getWriterPath());
+  }
+
+  @Override
+  public void cleanUp() {
+    try {
+      flush();
+    } catch (IOException ex) {
+      LOGGER.error("Error flushing on cleanup", ex);
     }
-
-    @Override
-    public DatumStatusCounter getDatumStatusCounter() {
-        DatumStatusCounter counters = new DatumStatusCounter();
-        counters.incrementAttempt(this.totalRecordsWritten);
-        counters.incrementStatus(DatumStatus.SUCCESS, this.totalRecordsWritten);
-        return counters;
+    try {
+      close();
+    } catch (IOException ex) {
+      LOGGER.error("Error closing on cleanup", ex);
     }
+  }
+
+  @Override
+  public DatumStatusCounter getDatumStatusCounter() {
+    DatumStatusCounter counters = new DatumStatusCounter();
+    counters.incrementAttempt(this.totalRecordsWritten);
+    counters.incrementStatus(DatumStatus.SUCCESS, this.totalRecordsWritten);
+    return counters;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistWriterTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistWriterTask.java b/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistWriterTask.java
index 00cf17f..eb808c1 100644
--- a/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistWriterTask.java
+++ b/streams-contrib/streams-persist-hdfs/src/main/java/org/apache/streams/hdfs/WebHdfsPersistWriterTask.java
@@ -19,38 +19,43 @@
 package org.apache.streams.hdfs;
 
 import org.apache.streams.core.StreamsDatum;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Random;
 
+/**
+ * WebHdfsPersistWriterTask writes to hdfs on behalf of
+ * @see org.apache.streams.hdfs.WebHdfsPersistWriter
+ */
 public class WebHdfsPersistWriterTask implements Runnable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(WebHdfsPersistWriterTask.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(WebHdfsPersistWriterTask.class);
 
-    private WebHdfsPersistWriter writer;
+  private WebHdfsPersistWriter writer;
 
-    public WebHdfsPersistWriterTask(WebHdfsPersistWriter writer) {
-        this.writer = writer;
-    }
+  public WebHdfsPersistWriterTask(WebHdfsPersistWriter writer) {
+    this.writer = writer;
+  }
 
-    @Override
-    public void run() {
-
-        while(true) {
-            if( writer.persistQueue.peek() != null ) {
-                try {
-                    StreamsDatum entry = writer.persistQueue.remove();
-                    writer.write(entry);
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-            try {
-                Thread.sleep(new Random().nextInt(1));
-            } catch (InterruptedException e) {}
-        }
+  @Override
+  public void run() {
 
+    while (true) {
+      if ( writer.persistQueue.peek() != null ) {
+        try {
+          StreamsDatum entry = writer.persistQueue.remove();
+          writer.write(entry);
+        } catch (Exception e) {
+          e.printStackTrace();
+        }
+      }
+      try {
+        Thread.sleep(new Random().nextInt(1));
+      } catch (InterruptedException e) {}
     }
 
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-hdfs/src/test/java/org/apache/streams/hdfs/test/HdfsPersistConfigTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-hdfs/src/test/java/org/apache/streams/hdfs/test/HdfsPersistConfigTest.java b/streams-contrib/streams-persist-hdfs/src/test/java/org/apache/streams/hdfs/test/HdfsPersistConfigTest.java
index 819414a..a35f124 100644
--- a/streams-contrib/streams-persist-hdfs/src/test/java/org/apache/streams/hdfs/test/HdfsPersistConfigTest.java
+++ b/streams-contrib/streams-persist-hdfs/src/test/java/org/apache/streams/hdfs/test/HdfsPersistConfigTest.java
@@ -33,7 +33,7 @@ import static org.junit.Assert.*;
  */
 public class HdfsPersistConfigTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(HdfsPersistConfigTest.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(HdfsPersistConfigTest.class);
 
     @Test
     public void getWriterFileUriTest()

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-hdfs/src/test/java/org/apache/streams/hdfs/test/TestHdfsPersist.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-hdfs/src/test/java/org/apache/streams/hdfs/test/TestHdfsPersist.java b/streams-contrib/streams-persist-hdfs/src/test/java/org/apache/streams/hdfs/test/TestHdfsPersist.java
index ff33ec3..7191d9a 100644
--- a/streams-contrib/streams-persist-hdfs/src/test/java/org/apache/streams/hdfs/test/TestHdfsPersist.java
+++ b/streams-contrib/streams-persist-hdfs/src/test/java/org/apache/streams/hdfs/test/TestHdfsPersist.java
@@ -46,7 +46,7 @@ import java.util.List;
  */
 public class TestHdfsPersist {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(TestHdfsPersist.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(TestHdfsPersist.class);
 
     ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistReader.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistReader.java b/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistReader.java
index d54e794..64f7200 100644
--- a/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistReader.java
+++ b/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistReader.java
@@ -18,19 +18,14 @@
 
 package org.apache.streams.kafka;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import kafka.consumer.Consumer;
-import kafka.consumer.ConsumerConfig;
-import kafka.consumer.KafkaStream;
-import kafka.consumer.Whitelist;
-import kafka.javaapi.consumer.ConsumerConnector;
-import kafka.serializer.StringDecoder;
-import kafka.utils.VerifiableProperties;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistReader;
 import org.apache.streams.core.StreamsResultSet;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -45,113 +40,132 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
-public class KafkaPersistReader implements StreamsPersistReader, Serializable {
-
-    public final static String STREAMS_ID = "KafkaPersistReader";
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(KafkaPersistReader.class);
-
-    protected volatile Queue<StreamsDatum> persistQueue;
-
-    private ObjectMapper mapper = new ObjectMapper();
-
-    private KafkaConfiguration config;
-
-    private ConsumerConnector consumerConnector;
-
-    public List<KafkaStream<String, String>> inStreams;
-
-    private ExecutorService executor = Executors.newSingleThreadExecutor();
+import kafka.consumer.Consumer;
+import kafka.consumer.ConsumerConfig;
+import kafka.consumer.KafkaStream;
+import kafka.consumer.Whitelist;
+import kafka.javaapi.consumer.ConsumerConnector;
+import kafka.serializer.StringDecoder;
+import kafka.utils.VerifiableProperties;
 
-    public KafkaPersistReader() {
-        this.config = new ComponentConfigurator<>(KafkaConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("kafka"));
-        this.persistQueue  = new ConcurrentLinkedQueue<>();
-    }
+/**
+ * KafkaPersistReader reads documents from kafka.
+ */
+public class KafkaPersistReader implements StreamsPersistReader, Serializable {
 
-    public KafkaPersistReader(Queue<StreamsDatum> persistQueue) {
-        this.config = new ComponentConfigurator<>(KafkaConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("kafka"));
-        this.persistQueue = persistQueue;
-    }
+  public static final String STREAMS_ID = "KafkaPersistReader";
 
-    public void setConfig(KafkaConfiguration config) {
-        this.config = config;
-    }
+  private static final Logger LOGGER = LoggerFactory.getLogger(KafkaPersistReader.class);
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  protected volatile Queue<StreamsDatum> persistQueue;
 
-    @Override
-    public void startStream() {
+  private ObjectMapper mapper = new ObjectMapper();
 
-        Properties props = new Properties();
-        props.setProperty("serializer.encoding", "UTF8");
+  private KafkaConfiguration config;
 
-        ConsumerConfig consumerConfig = new ConsumerConfig(props);
+  private ConsumerConnector consumerConnector;
 
-        consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
+  public List<KafkaStream<String, String>> inStreams;
 
-        Whitelist topics = new Whitelist(config.getTopic());
-        VerifiableProperties vprops = new VerifiableProperties(props);
+  private ExecutorService executor = Executors.newSingleThreadExecutor();
 
-        inStreams = consumerConnector.createMessageStreamsByFilter(topics, 1, new StringDecoder(vprops), new StringDecoder(vprops));
+  /**
+   * KafkaPersistReader constructor - resolves KafkaConfiguration from JVM 'kafka'.
+   */
+  public KafkaPersistReader() {
+    this.config = new ComponentConfigurator<>(KafkaConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("kafka"));
+    this.persistQueue  = new ConcurrentLinkedQueue<>();
+  }
 
-        for (final KafkaStream stream : inStreams) {
-            executor.submit(new KafkaPersistReaderTask(this, stream));
-        }
+  /**
+   * KafkaPersistReader constructor - uses supplied persistQueue.
+   */
+  public KafkaPersistReader(Queue<StreamsDatum> persistQueue) {
+    this.config = new ComponentConfigurator<>(KafkaConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("kafka"));
+    this.persistQueue = persistQueue;
+  }
 
-    }
+  public void setConfig(KafkaConfiguration config) {
+    this.config = config;
+  }
 
-    @Override
-    public StreamsResultSet readAll() {
-        return readCurrent();
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public StreamsResultSet readCurrent() {
-        return null;
-    }
+  @Override
+  public void startStream() {
 
-    @Override
-    public StreamsResultSet readNew(BigInteger bigInteger) {
-        return null;
-    }
+    Properties props = new Properties();
+    props.setProperty("serializer.encoding", "UTF8");
 
-    @Override
-    public StreamsResultSet readRange(DateTime dateTime, DateTime dateTime2) {
-        return null;
-    }
+    ConsumerConfig consumerConfig = new ConsumerConfig(props);
 
-    @Override
-    public boolean isRunning() {
-        return !executor.isShutdown() && !executor.isTerminated();
-    }
+    consumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
 
-    private static ConsumerConfig createConsumerConfig(String a_zookeeper, String a_groupId) {
-        Properties props = new Properties();
-        props.put("zookeeper.connect", a_zookeeper);
-        props.put("group.id", a_groupId);
-        props.put("zookeeper.session.timeout.ms", "400");
-        props.put("zookeeper.sync.time.ms", "200");
-        props.put("auto.commit.interval.ms", "1000");
-        return new ConsumerConfig(props);
-    }
+    Whitelist topics = new Whitelist(config.getTopic());
+    VerifiableProperties vprops = new VerifiableProperties(props);
 
-    @Override
-    public void prepare(Object configurationObject) {
+    inStreams = consumerConnector.createMessageStreamsByFilter(topics, 1, new StringDecoder(vprops), new StringDecoder(vprops));
 
+    for (final KafkaStream stream : inStreams) {
+      executor.submit(new KafkaPersistReaderTask(this, stream));
     }
 
-    @Override
-    public void cleanUp() {
-        consumerConnector.shutdown();
-        while( !executor.isTerminated()) {
-            try {
-                executor.awaitTermination(5, TimeUnit.SECONDS);
-            } catch (InterruptedException ignored) {}
-        }
+  }
+
+  @Override
+  public StreamsResultSet readAll() {
+    return readCurrent();
+  }
+
+  @Override
+  public StreamsResultSet readCurrent() {
+    return null;
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger bigInteger) {
+    return null;
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime dateTime, DateTime dateTime2) {
+    return null;
+  }
+
+  @Override
+  public boolean isRunning() {
+    return !executor.isShutdown() && !executor.isTerminated();
+  }
+
+  private static ConsumerConfig createConsumerConfig(String zookeeper, String groupId) {
+    Properties props = new Properties();
+    props.put("zookeeper.connect", zookeeper);
+    props.put("group.id", groupId);
+    props.put("zookeeper.session.timeout.ms", "400");
+    props.put("zookeeper.sync.time.ms", "200");
+    props.put("auto.commit.interval.ms", "1000");
+    return new ConsumerConfig(props);
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+
+  }
+
+  @Override
+  public void cleanUp() {
+    consumerConnector.shutdown();
+    while ( !executor.isTerminated()) {
+      try {
+        executor.awaitTermination(5, TimeUnit.SECONDS);
+      } catch (InterruptedException interrupt) {
+        LOGGER.trace("Interrupt", interrupt);
+      }
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistReaderTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistReaderTask.java b/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistReaderTask.java
index 83493e0..199be73 100644
--- a/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistReaderTask.java
+++ b/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistReaderTask.java
@@ -18,45 +18,51 @@
 
 package org.apache.streams.kafka;
 
-import kafka.consumer.ConsumerIterator;
-import kafka.consumer.KafkaStream;
-import kafka.message.MessageAndMetadata;
 import org.apache.streams.core.StreamsDatum;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Random;
 
-public class KafkaPersistReaderTask implements Runnable {
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(KafkaPersistReaderTask.class);
-
-    private KafkaPersistReader reader;
-    private KafkaStream<String,String> stream;
+import kafka.consumer.ConsumerIterator;
+import kafka.consumer.KafkaStream;
+import kafka.message.MessageAndMetadata;
 
-    public KafkaPersistReaderTask(KafkaPersistReader reader, KafkaStream<String,String> stream) {
-        this.reader = reader;
-        this.stream = stream;
-    }
+/**
+ * KafkaPersistReaderTask reads documents from kafka on behalf of a reader.
+ * @see org.apache.streams.kafka.KafkaPersistReader
+ */
+public class KafkaPersistReaderTask implements Runnable {
 
+  private static final Logger LOGGER = LoggerFactory.getLogger(KafkaPersistReaderTask.class);
 
+  private KafkaPersistReader reader;
+  private KafkaStream<String,String> stream;
 
-    @Override
-    public void run() {
+  public KafkaPersistReaderTask(KafkaPersistReader reader, KafkaStream<String,String> stream) {
+    this.reader = reader;
+    this.stream = stream;
+  }
 
-        MessageAndMetadata<String,String> item;
-        while(true) {
+  @Override
+  public void run() {
 
-            ConsumerIterator<String, String> it = stream.iterator();
-            while (it.hasNext()) {
-                item = it.next();
-                reader.persistQueue.add(new StreamsDatum(item.message()));
-            }
-            try {
-                Thread.sleep(new Random().nextInt(100));
-            } catch (InterruptedException e) {}
-        }
+    MessageAndMetadata<String,String> item;
+    while (true) {
 
+      ConsumerIterator<String, String> it = stream.iterator();
+      while (it.hasNext()) {
+        item = it.next();
+        reader.persistQueue.add(new StreamsDatum(item.message()));
+      }
+      try {
+        Thread.sleep(new Random().nextInt(100));
+      } catch (InterruptedException interrupt) {
+        LOGGER.trace("Interrupt", interrupt);
+      }
     }
 
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistWriter.java b/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistWriter.java
index 83032e6..40e125f 100644
--- a/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistWriter.java
+++ b/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistWriter.java
@@ -18,16 +18,15 @@
 
 package org.apache.streams.kafka;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import kafka.javaapi.producer.Producer;
-import kafka.producer.KeyedMessage;
-import kafka.producer.ProducerConfig;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistWriter;
 import org.apache.streams.util.GuidUtils;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -36,99 +35,114 @@ import java.util.Properties;
 import java.util.Queue;
 import java.util.concurrent.ConcurrentLinkedQueue;
 
-public class KafkaPersistWriter implements StreamsPersistWriter, Serializable, Runnable {
-
-    public final static String STREAMS_ID = "KafkaPersistWriter";
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(KafkaPersistWriter.class);
+import kafka.javaapi.producer.Producer;
+import kafka.producer.KeyedMessage;
+import kafka.producer.ProducerConfig;
 
-    protected volatile Queue<StreamsDatum> persistQueue;
+/**
+ * KafkaPersistWriter writes documents to kafka.
+ */
+public class KafkaPersistWriter implements StreamsPersistWriter, Serializable, Runnable {
 
-    private ObjectMapper mapper = new ObjectMapper();
+  public static final String STREAMS_ID = "KafkaPersistWriter";
 
-    private KafkaConfiguration config;
+  private static final Logger LOGGER = LoggerFactory.getLogger(KafkaPersistWriter.class);
 
-    private Producer<String, String> producer;
+  protected volatile Queue<StreamsDatum> persistQueue;
 
-    public KafkaPersistWriter() {
-        this.config = new ComponentConfigurator<>(KafkaConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("kafka"));
-        this.persistQueue  = new ConcurrentLinkedQueue<>();
-    }
+  private ObjectMapper mapper = new ObjectMapper();
 
-    public KafkaPersistWriter(Queue<StreamsDatum> persistQueue) {
-        this.config = new ComponentConfigurator<>(KafkaConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("kafka"));
-        this.persistQueue = persistQueue;
-    }
+  private KafkaConfiguration config;
 
-    public void setConfig(KafkaConfiguration config) {
-        this.config = config;
-    }
+  private Producer<String, String> producer;
 
-    public void start() {
-        Properties props = new Properties();
+  /**
+   * KafkaPersistWriter constructor - resolves KafkaConfiguration from JVM 'kafka'.
+   */
+  public KafkaPersistWriter() {
+    this.config = new ComponentConfigurator<>(KafkaConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("kafka"));
+    this.persistQueue  = new ConcurrentLinkedQueue<>();
+  }
 
-        props.put("metadata.broker.list", config.getBrokerlist());
-        props.put("serializer.class", "kafka.serializer.StringEncoder");
-        props.put("partitioner.class", "org.apache.streams.kafka.StreamsPartitioner");
-        props.put("request.required.acks", "1");
+  /**
+   * KafkaPersistWriter constructor - uses supplied persistQueue.
+   */
+  public KafkaPersistWriter(Queue<StreamsDatum> persistQueue) {
+    this.config = new ComponentConfigurator<>(KafkaConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("kafka"));
+    this.persistQueue = persistQueue;
+  }
 
-        ProducerConfig config = new ProducerConfig(props);
+  public void setConfig(KafkaConfiguration config) {
+    this.config = config;
+  }
 
-        producer = new Producer<>(config);
+  /**
+   * Runs the persist writer thread.
+   */
+  public void start() {
+    Properties props = new Properties();
 
-        new Thread(new KafkaPersistWriterTask(this)).start();
-    }
+    props.put("metadata.broker.list", config.getBrokerlist());
+    props.put("serializer.class", "kafka.serializer.StringEncoder");
+    props.put("partitioner.class", "org.apache.streams.kafka.StreamsPartitioner");
+    props.put("request.required.acks", "1");
 
-    public void stop() {
-        producer.close();
-    }
+    ProducerConfig config = new ProducerConfig(props);
 
-    public void setPersistQueue(Queue<StreamsDatum> persistQueue) {
-        this.persistQueue = persistQueue;
-    }
+    producer = new Producer<>(config);
 
-    public Queue<StreamsDatum> getPersistQueue() {
-        return this.persistQueue;
-    }
+    new Thread(new KafkaPersistWriterTask(this)).start();
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public void stop() {
+    producer.close();
+  }
 
-    @Override
-    public void write(StreamsDatum entry) {
+  public void setPersistQueue(Queue<StreamsDatum> persistQueue) {
+    this.persistQueue = persistQueue;
+  }
 
-        try {
-            String text = mapper.writeValueAsString(entry);
+  public Queue<StreamsDatum> getPersistQueue() {
+    return this.persistQueue;
+  }
 
-            String hash = GuidUtils.generateGuid(text);
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-            KeyedMessage<String, String> data = new KeyedMessage<>(config.getTopic(), hash, text);
+  @Override
+  public void write(StreamsDatum entry) {
 
-            producer.send(data);
+    try {
 
-        } catch (JsonProcessingException e) {
-            LOGGER.warn("save: {}", e);
-        }// put
-    }
+      String text = mapper.writeValueAsString(entry);
 
-    @Override
-    public void run() {
-        start();
+      String hash = GuidUtils.generateGuid(text);
 
-        // stop();
-    }
+      KeyedMessage<String, String> data = new KeyedMessage<>(config.getTopic(), hash, text);
 
-    @Override
-    public void prepare(Object configurationObject) {
-        start();
-    }
+      producer.send(data);
 
-    @Override
-    public void cleanUp() {
-        stop();
+    } catch (JsonProcessingException ex) {
+      LOGGER.warn("save: {}", ex);
     }
+  }
+
+  @Override
+  public void run() {
+    start();
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    start();
+  }
+
+  @Override
+  public void cleanUp() {
+    stop();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistWriterTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistWriterTask.java b/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistWriterTask.java
index 5d8ee9e..dae7aa2 100644
--- a/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistWriterTask.java
+++ b/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/KafkaPersistWriterTask.java
@@ -19,38 +19,45 @@
 package org.apache.streams.kafka;
 
 import org.apache.streams.core.StreamsDatum;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Random;
 
+/**
+ * KafkaPersistWriterTask writes documents to kafka on behalf of a writer.
+ * @see org.apache.streams.kafka.KafkaPersistWriter
+ */
 public class KafkaPersistWriterTask implements Runnable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(KafkaPersistWriterTask.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(KafkaPersistWriterTask.class);
 
-    private KafkaPersistWriter writer;
+  private KafkaPersistWriter writer;
 
-    public KafkaPersistWriterTask(KafkaPersistWriter writer) {
-        this.writer = writer;
-    }
+  public KafkaPersistWriterTask(KafkaPersistWriter writer) {
+    this.writer = writer;
+  }
 
-    @Override
-    public void run() {
-
-        while(true) {
-            if( writer.getPersistQueue().peek() != null ) {
-                try {
-                    StreamsDatum entry = writer.persistQueue.remove();
-                    writer.write(entry);
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-            try {
-                Thread.sleep(new Random().nextInt(100));
-            } catch (InterruptedException e) {}
-        }
+  @Override
+  public void run() {
 
+    while (true) {
+      if ( writer.getPersistQueue().peek() != null ) {
+        try {
+          StreamsDatum entry = writer.persistQueue.remove();
+          writer.write(entry);
+        } catch (Exception ex) {
+          ex.printStackTrace();
+        }
+      }
+      try {
+        Thread.sleep(new Random().nextInt(100));
+      } catch (InterruptedException interrupt) {
+        LOGGER.trace("Interrupt", interrupt);
+      }
     }
 
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/StreamsPartitioner.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/StreamsPartitioner.java b/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/StreamsPartitioner.java
deleted file mode 100644
index ebfff9a..0000000
--- a/streams-contrib/streams-persist-kafka/src/main/java/org/apache/streams/kafka/StreamsPartitioner.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.kafka;
-
-/**
- * Created by sblackmon on 12/15/13.
- */
-import kafka.producer.Partitioner;
-import kafka.utils.VerifiableProperties;
-
-public class StreamsPartitioner implements Partitioner<String> {
-    public StreamsPartitioner (VerifiableProperties props) {
-
-    }
-
-    public int partition(String key, int a_numPartitions) {
-        int partition = 0;
-        int offset = key.lastIndexOf('.');
-        if (offset > 0) {
-            partition = Integer.parseInt( key.substring(offset+1)) % a_numPartitions;
-        }
-        return partition;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-mongo/src/main/java/org/apache/streams/mongo/MongoPersistReader.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-mongo/src/main/java/org/apache/streams/mongo/MongoPersistReader.java b/streams-contrib/streams-persist-mongo/src/main/java/org/apache/streams/mongo/MongoPersistReader.java
index ba77ff1..b6a7404 100644
--- a/streams-contrib/streams-persist-mongo/src/main/java/org/apache/streams/mongo/MongoPersistReader.java
+++ b/streams-contrib/streams-persist-mongo/src/main/java/org/apache/streams/mongo/MongoPersistReader.java
@@ -18,6 +18,14 @@
 
 package org.apache.streams.mongo;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.DatumStatusCounter;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsPersistReader;
+import org.apache.streams.core.StreamsResultSet;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.base.Strings;
@@ -30,13 +38,7 @@ import com.mongodb.DBObject;
 import com.mongodb.MongoClient;
 import com.mongodb.MongoCredential;
 import com.mongodb.ServerAddress;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.DatumStatusCounter;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsPersistReader;
-import org.apache.streams.core.StreamsResultSet;
-import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -52,230 +54,248 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
+/**
+ * MongoPersistReader reads documents from mongo.
+ */
 public class MongoPersistReader implements StreamsPersistReader {
 
-    public static final String STREAMS_ID = "MongoPersistReader";
+  public static final String STREAMS_ID = "MongoPersistReader";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(MongoPersistReader.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistReader.class);
 
-    protected volatile Queue<StreamsDatum> persistQueue;
+  protected volatile Queue<StreamsDatum> persistQueue;
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-    private volatile AtomicLong lastWrite = new AtomicLong(System.currentTimeMillis());
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private volatile AtomicLong lastWrite = new AtomicLong(System.currentTimeMillis());
 
-    private ExecutorService executor;
+  private ExecutorService executor;
 
-    private MongoConfiguration config;
+  private MongoConfiguration config;
 
-    protected MongoClient client;
-    protected DB db;
-    protected DBCollection collection;
+  protected MongoClient client;
+  protected DB db;
+  protected DBCollection collection;
 
-    protected DBCursor cursor;
+  protected DBCursor cursor;
 
-    protected final ReadWriteLock lock = new ReentrantReadWriteLock();
-
-    public MongoPersistReader() {
-        this.config = new ComponentConfigurator<>(MongoConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("mongo"));
-    }
+  protected final ReadWriteLock lock = new ReentrantReadWriteLock();
 
-    public MongoPersistReader(MongoConfiguration config) {
-        this.config = config;
-    }
-
-    public MongoPersistReader(Queue<StreamsDatum> persistQueue) {
-        this.config = new ComponentConfigurator<>(MongoConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("mongo"));
-        this.persistQueue = persistQueue;
-    }
+  /**
+   * MongoPersistReader constructor - resolves MongoConfiguration from JVM 'mongo'.
+   */
+  public MongoPersistReader() {
+    this.config = new ComponentConfigurator<>(MongoConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("mongo"));
+  }
 
-    public void setPersistQueue(Queue<StreamsDatum> persistQueue) {
-        this.persistQueue = persistQueue;
-    }
+  /**
+   * MongoPersistReader constructor - uses supplied MongoConfiguration.
+   * @param config config
+   */
+  public MongoPersistReader(MongoConfiguration config) {
+    this.config = config;
+  }
 
-    public Queue<StreamsDatum> getPersistQueue() {
-        return persistQueue;
-    }
+  /**
+   * MongoPersistReader constructor - uses supplied persistQueue.
+   * @param persistQueue persistQueue
+   */
+  public MongoPersistReader(Queue<StreamsDatum> persistQueue) {
+    this.config = new ComponentConfigurator<>(MongoConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("mongo"));
+    this.persistQueue = persistQueue;
+  }
 
-    public void stop() {
-    }
+  public void setPersistQueue(Queue<StreamsDatum> persistQueue) {
+    this.persistQueue = persistQueue;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public Queue<StreamsDatum> getPersistQueue() {
+    return persistQueue;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  public void stop() {
+  }
 
-        connectToMongo();
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        if( client == null ||
-                collection == null )
-            throw new RuntimeException("Unable to connect!");
+  @Override
+  public void prepare(Object configurationObject) {
 
-        cursor = collection.find();
+    connectToMongo();
 
-        if( cursor == null || !cursor.hasNext())
-            throw new RuntimeException("Collection not present or empty!");
+    if ( client == null
+        || collection == null ) {
+      throw new RuntimeException("Unable to connect!");
+    }
+    cursor = collection.find();
 
-        persistQueue = constructQueue();
+    if ( cursor == null
+        || !cursor.hasNext()) {
+      throw new RuntimeException("Collection not present or empty!");
+    }
 
-        executor = Executors.newSingleThreadExecutor();
+    persistQueue = constructQueue();
 
-    }
+    executor = Executors.newSingleThreadExecutor();
 
-    @Override
-    public void cleanUp() {
-        stop();
-    }
+  }
 
-    protected StreamsDatum prepareDatum(DBObject dbObject) {
+  @Override
+  public void cleanUp() {
+    stop();
+  }
 
-        ObjectNode objectNode;
-        String id;
+  protected StreamsDatum prepareDatum(DBObject dbObject) {
 
-        try {
-            objectNode = mapper.readValue(dbObject.toString(), ObjectNode.class);
-            id = objectNode.get("_id").get("$oid").asText();
-            objectNode.remove("_id");
-        } catch (IOException e) {
-            LOGGER.warn("document isn't valid JSON.");
-            return null;
-        }
+    ObjectNode objectNode;
+    String id;
 
-        return new StreamsDatum(objectNode, id);
+    try {
+      objectNode = mapper.readValue(dbObject.toString(), ObjectNode.class);
+      id = objectNode.get("_id").get("$oid").asText();
+      objectNode.remove("_id");
+    } catch (IOException ex) {
+      LOGGER.warn("document isn't valid JSON.");
+      return null;
     }
 
-    private synchronized void connectToMongo() {
+    return new StreamsDatum(objectNode, id);
+  }
 
-        ServerAddress serverAddress = new ServerAddress(config.getHost(), config.getPort().intValue());
+  private synchronized void connectToMongo() {
 
-        if (!Strings.isNullOrEmpty(config.getUser()) && !Strings.isNullOrEmpty(config.getPassword())) {
-            MongoCredential credential =
-                    MongoCredential.createCredential(config.getUser(), config.getDb(), config.getPassword().toCharArray());
-            client = new MongoClient(serverAddress, Lists.newArrayList(credential));
-        } else {
-            client = new MongoClient(serverAddress);
-        }
+    ServerAddress serverAddress = new ServerAddress(config.getHost(), config.getPort().intValue());
 
-        db = client.getDB(config.getDb());
+    if (!Strings.isNullOrEmpty(config.getUser()) && !Strings.isNullOrEmpty(config.getPassword())) {
+      MongoCredential credential =
+          MongoCredential.createCredential(config.getUser(), config.getDb(), config.getPassword().toCharArray());
+      client = new MongoClient(serverAddress, Lists.newArrayList(credential));
+    } else {
+      client = new MongoClient(serverAddress);
+    }
 
-        if (!db.collectionExists(config.getCollection())) {
-            db.createCollection(config.getCollection(), null);
-        }
+    db = client.getDB(config.getDb());
 
-        collection = db.getCollection(config.getCollection());
+    if (!db.collectionExists(config.getCollection())) {
+      db.createCollection(config.getCollection(), null);
     }
 
-    @Override
-    public StreamsResultSet readAll() {
-
-        try (DBCursor cursor = collection.find()) {
-            while (cursor.hasNext()) {
-                DBObject dbObject = cursor.next();
-                StreamsDatum datum = prepareDatum(dbObject);
-                write(datum);
-            }
-        }
+    collection = db.getCollection(config.getCollection());
+  }
 
-        return readCurrent();
-    }
+  @Override
+  public StreamsResultSet readAll() {
 
-    @Override
-    public void startStream() {
+    try (DBCursor cursor = collection.find()) {
+      while (cursor.hasNext()) {
+        DBObject dbObject = cursor.next();
+        StreamsDatum datum = prepareDatum(dbObject);
+        write(datum);
+      }
+    }
 
-        LOGGER.debug("startStream");
-        MongoPersistReaderTask readerTask = new MongoPersistReaderTask(this);
-        Thread readerTaskThread = new Thread(readerTask);
-        Future future = executor.submit(readerTaskThread);
+    return readCurrent();
+  }
 
-        while( !future.isDone() && !future.isCancelled()) {
-            try {
-                Thread.sleep(1000);
-            } catch (InterruptedException ignored) {}
-        }
+  @Override
+  public void startStream() {
 
-        executor.shutdown();
+    LOGGER.debug("startStream");
+    MongoPersistReaderTask readerTask = new MongoPersistReaderTask(this);
+    Thread readerTaskThread = new Thread(readerTask);
+    Future future = executor.submit(readerTaskThread);
 
+    while ( !future.isDone() && !future.isCancelled()) {
+      try {
+        Thread.sleep(1000);
+      } catch (InterruptedException interrupt) {
+        LOGGER.trace("Interrupt", interrupt);
+      }
     }
 
-    @Override
-    public StreamsResultSet readCurrent() {
+    executor.shutdown();
 
-        StreamsResultSet current;
+  }
 
-        try {
-            lock.writeLock().lock();
-            current = new StreamsResultSet(persistQueue);
-            current.setCounter(new DatumStatusCounter());
-            persistQueue = constructQueue();
-        } finally {
-            lock.writeLock().unlock();
-        }
+  @Override
+  public StreamsResultSet readCurrent() {
 
-        return current;
-    }
+    StreamsResultSet current;
 
-    //The locking may appear to be counter intuitive but we really don't care if multiple threads offer to the queue
-    //as it is a synchronized queue.  What we do care about is that we don't want to be offering to the current reference
-    //if the queue is being replaced with a new instance
-    protected void write(StreamsDatum entry) {
-        boolean success;
-        do {
-            try {
-                lock.readLock().lock();
-                success = persistQueue.offer(entry);
-                Thread.yield();
-            } finally {
-                lock.readLock().unlock();
-            }
-        }
-        while (!success);
+    try {
+      lock.writeLock().lock();
+      current = new StreamsResultSet(persistQueue);
+      current.setCounter(new DatumStatusCounter());
+      persistQueue = constructQueue();
+    } finally {
+      lock.writeLock().unlock();
     }
 
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return null;
+    return current;
+  }
+
+  //The locking may appear to be counter intuitive but we really don't care if multiple threads offer to the queue
+  //as it is a synchronized queue.  What we do care about is that we don't want to be offering to the current reference
+  //if the queue is being replaced with a new instance
+  protected void write(StreamsDatum entry) {
+    boolean success;
+    do {
+      try {
+        lock.readLock().lock();
+        success = persistQueue.offer(entry);
+        Thread.yield();
+      } finally {
+        lock.readLock().unlock();
+      }
     }
+    while (!success);
+  }
 
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return null;
-    }
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return null;
+  }
 
-    @Override
-    public boolean isRunning() {
-        return !executor.isTerminated() || !executor.isShutdown();
-    }
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return null;
+  }
 
-    private Queue<StreamsDatum> constructQueue() {
-        return Queues.synchronizedQueue(new LinkedBlockingQueue<StreamsDatum>(10000));
-    }
+  @Override
+  public boolean isRunning() {
+    return !executor.isTerminated() || !executor.isShutdown();
+  }
 
-    public class MongoPersistReaderTask implements Runnable {
+  private Queue<StreamsDatum> constructQueue() {
+    return Queues.synchronizedQueue(new LinkedBlockingQueue<StreamsDatum>(10000));
+  }
 
-        private MongoPersistReader reader;
+  public class MongoPersistReaderTask implements Runnable {
 
-        public MongoPersistReaderTask(MongoPersistReader reader) {
-            this.reader = reader;
-        }
+    private MongoPersistReader reader;
 
-        @Override
-        public void run() {
+    public MongoPersistReaderTask(MongoPersistReader reader) {
+      this.reader = reader;
+    }
 
-            try {
-                while(reader.cursor.hasNext()) {
-                    DBObject dbObject = reader.cursor.next();
-                    StreamsDatum datum = reader.prepareDatum(dbObject);
-                    reader.write(datum);
-                }
-            } finally {
-                reader.cursor.close();
-            }
+    @Override
+    public void run() {
 
+      try {
+        while (reader.cursor.hasNext()) {
+          DBObject dbObject = reader.cursor.next();
+          StreamsDatum datum = reader.prepareDatum(dbObject);
+          reader.write(datum);
         }
+      } finally {
+        reader.cursor.close();
+      }
 
     }
+
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-mongo/src/main/java/org/apache/streams/mongo/MongoPersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-mongo/src/main/java/org/apache/streams/mongo/MongoPersistWriter.java b/streams-contrib/streams-persist-mongo/src/main/java/org/apache/streams/mongo/MongoPersistWriter.java
index 5f6ac1f..6072f58 100644
--- a/streams-contrib/streams-persist-mongo/src/main/java/org/apache/streams/mongo/MongoPersistWriter.java
+++ b/streams-contrib/streams-persist-mongo/src/main/java/org/apache/streams/mongo/MongoPersistWriter.java
@@ -18,6 +18,12 @@
 
 package org.apache.streams.mongo;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsPersistWriter;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.base.Strings;
@@ -29,14 +35,12 @@ import com.mongodb.MongoClient;
 import com.mongodb.MongoCredential;
 import com.mongodb.ServerAddress;
 import com.mongodb.util.JSON;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsPersistWriter;
-import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.Closeable;
+import java.io.Flushable;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -50,209 +54,217 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
-public class MongoPersistWriter implements StreamsPersistWriter, Runnable {
+public class MongoPersistWriter implements StreamsPersistWriter, Runnable, Flushable, Closeable {
 
-    public final static String STREAMS_ID = "MongoPersistWriter";
+  public static final String STREAMS_ID = "MongoPersistWriter";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(MongoPersistWriter.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistWriter.class);
 
-    private final static long MAX_WRITE_LATENCY = 1000;
+  private static final long MAX_WRITE_LATENCY = 1000;
 
-    protected volatile Queue<StreamsDatum> persistQueue;
+  protected volatile Queue<StreamsDatum> persistQueue;
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-    private volatile AtomicLong lastWrite = new AtomicLong(System.currentTimeMillis());
-    private ScheduledExecutorService backgroundFlushTask = Executors.newSingleThreadScheduledExecutor();
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private volatile AtomicLong lastWrite = new AtomicLong(System.currentTimeMillis());
+  private ScheduledExecutorService backgroundFlushTask = Executors.newSingleThreadScheduledExecutor();
 
-    private MongoConfiguration config;
+  private MongoConfiguration config;
 
-    protected MongoClient client;
-    protected DB db;
-    protected DBCollection collection;
+  protected MongoClient client;
+  protected DB db;
+  protected DBCollection collection;
 
-    protected List<DBObject> insertBatch = new ArrayList<>();
+  protected List<DBObject> insertBatch = new ArrayList<>();
 
-    protected final ReadWriteLock lock = new ReentrantReadWriteLock();
+  protected final ReadWriteLock lock = new ReentrantReadWriteLock();
 
-    public MongoPersistWriter() {
-        this(new ComponentConfigurator<>(MongoConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("mongo")));
-    }
-
-    public MongoPersistWriter(MongoConfiguration config) {
-        this.config = config;
-    }
+  public MongoPersistWriter() {
+    this(new ComponentConfigurator<>(MongoConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("mongo")));
+  }
 
-    public void setPersistQueue(Queue<StreamsDatum> persistQueue) {
-        this.persistQueue = persistQueue;
-    }
-
-    public Queue<StreamsDatum> getPersistQueue() {
-        return persistQueue;
-    }
+  public MongoPersistWriter(MongoConfiguration config) {
+    this.config = config;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public void setPersistQueue(Queue<StreamsDatum> persistQueue) {
+    this.persistQueue = persistQueue;
+  }
 
-    @Override
-    public void write(StreamsDatum streamsDatum) {
+  public Queue<StreamsDatum> getPersistQueue() {
+    return persistQueue;
+  }
 
-        DBObject dbObject = prepareObject(streamsDatum);
-        if (dbObject != null) {
-            addToBatch(dbObject);
-            flushIfNecessary();
-        }
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    public void flush() throws IOException {
-        try {
-            LOGGER.debug("Attempting to flush {} items to mongo", insertBatch.size());
-            lock.writeLock().lock();
-            collection.insert(insertBatch);
-            lastWrite.set(System.currentTimeMillis());
-            insertBatch = new ArrayList<>();
-        } finally {
-            lock.writeLock().unlock();
-        }
+  @Override
+  public void write(StreamsDatum streamsDatum) {
 
+    DBObject dbObject = prepareObject(streamsDatum);
+    if (dbObject != null) {
+      addToBatch(dbObject);
+      flushIfNecessary();
     }
-
-    public synchronized void close() throws IOException {
-//        client.cleanCursors(true);
-//        backgroundFlushTask.shutdownNow();
+  }
+
+  @Override
+  public void flush() throws IOException {
+    try {
+      LOGGER.debug("Attempting to flush {} items to mongo", insertBatch.size());
+      lock.writeLock().lock();
+      collection.insert(insertBatch);
+      lastWrite.set(System.currentTimeMillis());
+      insertBatch = new ArrayList<>();
+    } finally {
+      lock.writeLock().unlock();
     }
 
-    public void start() {
-        connectToMongo();
-        backgroundFlushTask.scheduleAtFixedRate(new Runnable() {
-            @Override
-            public void run() {
-                flushIfNecessary();
-            }
-        }, 0, MAX_WRITE_LATENCY * 2, TimeUnit.MILLISECONDS);
+  }
+
+  public synchronized void close() throws IOException {
+    client.close();
+    backgroundFlushTask.shutdownNow();
+  }
+
+  /**
+   * start write thread.
+   */
+  public void start() {
+    connectToMongo();
+    backgroundFlushTask.scheduleAtFixedRate(new Runnable() {
+      @Override
+      public void run() {
+        flushIfNecessary();
+      }
+    }, 0, MAX_WRITE_LATENCY * 2, TimeUnit.MILLISECONDS);
+  }
+
+  /**
+   * stop.
+   */
+  public void stop() {
+
+    try {
+      flush();
+    } catch (IOException ex) {
+      LOGGER.error("Error flushing", ex);
     }
-
-    public void stop() {
-
-        try {
-            flush();
-        } catch (IOException e) {
-            LOGGER.error("Error flushing", e);
-        }
-        try {
-            close();
-        } catch (IOException e) {
-            LOGGER.error("Error closing", e);
-        }
-        try {
-            backgroundFlushTask.shutdown();
-            // Wait a while for existing tasks to terminate
-            if (!backgroundFlushTask.awaitTermination(15, TimeUnit.SECONDS)) {
-                backgroundFlushTask.shutdownNow(); // Cancel currently executing tasks
-                // Wait a while for tasks to respond to being cancelled
-                if (!backgroundFlushTask.awaitTermination(15, TimeUnit.SECONDS)) {
-                    LOGGER.error("Stream did not terminate");
-                }
-            }
-        } catch (InterruptedException ie) {
-            // (Re-)Cancel if current thread also interrupted
-            backgroundFlushTask.shutdownNow();
-            // Preserve interrupt status
-            Thread.currentThread().interrupt();
-        }
-
+    try {
+      close();
+    } catch (IOException ex) {
+      LOGGER.error("Error closing", ex);
     }
-
-    @Override
-    public void run() {
-
-        while (true) {
-            if (persistQueue.peek() != null) {
-                try {
-                    StreamsDatum entry = persistQueue.remove();
-                    write(entry);
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-            try {
-                Thread.sleep(new Random().nextInt(1));
-            } catch (InterruptedException ignored) {
-            }
+    try {
+      backgroundFlushTask.shutdown();
+      // Wait a while for existing tasks to terminate
+      if (!backgroundFlushTask.awaitTermination(15, TimeUnit.SECONDS)) {
+        backgroundFlushTask.shutdownNow(); // Cancel currently executing tasks
+        // Wait a while for tasks to respond to being cancelled
+        if (!backgroundFlushTask.awaitTermination(15, TimeUnit.SECONDS)) {
+          LOGGER.error("Stream did not terminate");
         }
-
-    }
-
-    @Override
-    public void prepare(Object configurationObject) {
-        this.persistQueue = new ConcurrentLinkedQueue<>();
-        start();
+      }
+    } catch (InterruptedException ie) {
+      // (Re-)Cancel if current thread also interrupted
+      backgroundFlushTask.shutdownNow();
+      // Preserve interrupt status
+      Thread.currentThread().interrupt();
     }
 
-    @Override
-    public void cleanUp() {
-        stop();
-    }
+  }
 
-    protected void flushIfNecessary() {
-        long lastLatency = System.currentTimeMillis() - lastWrite.get();
-        //Flush iff the size > 0 AND the size is divisible by 100 or the time between now and the last flush is greater
-        //than the maximum desired latency
-        if (insertBatch.size() > 0 && (insertBatch.size() % 100 == 0 || lastLatency > MAX_WRITE_LATENCY)) {
-            try {
-                flush();
-            } catch (IOException e) {
-                LOGGER.error("Error writing to Mongo", e);
-            }
-        }
-    }
+  @Override
+  public void run() {
 
-    protected void addToBatch(DBObject dbObject) {
+    while (true) {
+      if (persistQueue.peek() != null) {
         try {
-            lock.readLock().lock();
-            insertBatch.add(dbObject);
-        } finally {
-            lock.readLock().unlock();
+          StreamsDatum entry = persistQueue.remove();
+          write(entry);
+        } catch (Exception ex) {
+          ex.printStackTrace();
         }
+      }
+      try {
+        Thread.sleep(new Random().nextInt(1));
+      } catch (InterruptedException interrupt) {
+        LOGGER.trace("Interrupt", interrupt);
+      }
     }
 
-    protected DBObject prepareObject(StreamsDatum streamsDatum) {
-        DBObject dbObject = null;
-        if (streamsDatum.getDocument() instanceof String) {
-            dbObject = (DBObject) JSON.parse((String) streamsDatum.getDocument());
-        } else {
-            try {
-                ObjectNode node = mapper.valueToTree(streamsDatum.getDocument());
-                dbObject = (DBObject) JSON.parse(node.toString());
-            } catch (Exception e) {
-                LOGGER.error("Unsupported type: " + streamsDatum.getDocument().getClass(), e);
-            }
-        }
-        return dbObject;
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    this.persistQueue = new ConcurrentLinkedQueue<>();
+    start();
+  }
+
+  @Override
+  public void cleanUp() {
+    stop();
+  }
+
+  protected void flushIfNecessary() {
+    long lastLatency = System.currentTimeMillis() - lastWrite.get();
+    //Flush iff the size > 0 AND the size is divisible by 100 or the time between now and the last flush is greater
+    //than the maximum desired latency
+    if (insertBatch.size() > 0 && (insertBatch.size() % 100 == 0 || lastLatency > MAX_WRITE_LATENCY)) {
+      try {
+        flush();
+      } catch (IOException ex) {
+        LOGGER.error("Error writing to Mongo", ex);
+      }
     }
+  }
+
+  protected void addToBatch(DBObject dbObject) {
+    try {
+      lock.readLock().lock();
+      insertBatch.add(dbObject);
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  protected DBObject prepareObject(StreamsDatum streamsDatum) {
+    DBObject dbObject = null;
+    if (streamsDatum.getDocument() instanceof String) {
+      dbObject = (DBObject) JSON.parse((String) streamsDatum.getDocument());
+    } else {
+      try {
+        ObjectNode node = mapper.valueToTree(streamsDatum.getDocument());
+        dbObject = (DBObject) JSON.parse(node.toString());
+      } catch (Exception ex) {
+        LOGGER.error("Unsupported type: " + streamsDatum.getDocument().getClass(), ex);
+      }
+    }
+    return dbObject;
+  }
 
-    private synchronized void connectToMongo() {
-
-        ServerAddress serverAddress = new ServerAddress(config.getHost(), config.getPort().intValue());
+  private synchronized void connectToMongo() {
 
-        if (!Strings.isNullOrEmpty(config.getUser()) && !Strings.isNullOrEmpty(config.getPassword())) {
-            MongoCredential credential =
-                    MongoCredential.createCredential(config.getUser(), config.getDb(), config.getPassword().toCharArray());
-            client = new MongoClient(serverAddress, Lists.newArrayList(credential));
-        } else {
-            client = new MongoClient(serverAddress);
-        }
+    ServerAddress serverAddress = new ServerAddress(config.getHost(), config.getPort().intValue());
 
-        db = client.getDB(config.getDb());
+    if (!Strings.isNullOrEmpty(config.getUser()) && !Strings.isNullOrEmpty(config.getPassword())) {
+      MongoCredential credential =
+          MongoCredential.createCredential(config.getUser(), config.getDb(), config.getPassword().toCharArray());
+      client = new MongoClient(serverAddress, Lists.newArrayList(credential));
+    } else {
+      client = new MongoClient(serverAddress);
+    }
 
-        if (!db.collectionExists(config.getCollection())) {
-            db.createCollection(config.getCollection(), null);
-        }
+    db = client.getDB(config.getDb());
 
-        collection = db.getCollection(config.getCollection());
+    if (!db.collectionExists(config.getCollection())) {
+      db.createCollection(config.getCollection(), null);
     }
 
+    collection = db.getCollection(config.getCollection());
+  }
+
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-persist-mongo/src/test/java/org/apache/streams/mongo/test/MongoPersistIT.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-persist-mongo/src/test/java/org/apache/streams/mongo/test/MongoPersistIT.java b/streams-contrib/streams-persist-mongo/src/test/java/org/apache/streams/mongo/test/MongoPersistIT.java
index 18f5a62..2a2e170 100644
--- a/streams-contrib/streams-persist-mongo/src/test/java/org/apache/streams/mongo/test/MongoPersistIT.java
+++ b/streams-contrib/streams-persist-mongo/src/test/java/org/apache/streams/mongo/test/MongoPersistIT.java
@@ -18,12 +18,6 @@
 
 package org.apache.streams.mongo.test;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigParseOptions;
-import org.apache.commons.io.Charsets;
-import org.apache.commons.io.IOUtils;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsResultSet;
@@ -32,6 +26,14 @@ import org.apache.streams.mongo.MongoConfiguration;
 import org.apache.streams.mongo.MongoPersistReader;
 import org.apache.streams.mongo.MongoPersistWriter;
 import org.apache.streams.pojo.json.Activity;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+
+import org.apache.commons.io.Charsets;
+import org.apache.commons.io.IOUtils;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -48,64 +50,64 @@ import static org.junit.Assert.assertEquals;
  */
 public class MongoPersistIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(MongoPersistIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistIT.class);
 
-    ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+  ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
 
-    MongoConfiguration testConfiguration;
+  MongoConfiguration testConfiguration;
 
-    int count = 0;
+  int count = 0;
 
-    @Before
-    public void setup() throws Exception {
+  @Before
+  public void setup() throws Exception {
 
-        Config reference  = ConfigFactory.load();
-        File conf_file = new File("target/test-classes/MongoPersistIT.conf");
-        assert(conf_file.exists());
-        Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
-        testConfiguration = new ComponentConfigurator<>(MongoConfiguration.class).detectConfiguration(typesafe, "mongo");
+    Config reference  = ConfigFactory.load();
+    File conf_file = new File("target/test-classes/MongoPersistIT.conf");
+    assert(conf_file.exists());
+    Config testResourceConfig  = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+    testConfiguration = new ComponentConfigurator<>(MongoConfiguration.class).detectConfiguration(typesafe, "mongo");
 
-    }
+  }
 
-    @Test
-    public void testMongoPersist() throws Exception {
+  @Test
+  public void testMongoPersist() throws Exception {
 
-        MongoPersistWriter writer = new MongoPersistWriter(testConfiguration);
+    MongoPersistWriter writer = new MongoPersistWriter(testConfiguration);
 
-        writer.prepare(null);
+    writer.prepare(null);
 
-        InputStream testActivityFolderStream = MongoPersistIT.class.getClassLoader()
-                .getResourceAsStream("activities");
-        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
+    InputStream testActivityFolderStream = MongoPersistIT.class.getClassLoader()
+        .getResourceAsStream("activities");
+    List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
 
-        for( String file : files) {
-            LOGGER.info("File: " + file );
-            InputStream testActivityFileStream = MongoPersistIT.class.getClassLoader()
-                    .getResourceAsStream("activities/" + file);
-            Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
-            activity.getAdditionalProperties().remove("$license");
-            StreamsDatum datum = new StreamsDatum(activity, activity.getVerb());
-            writer.write( datum );
-            LOGGER.info("Wrote: " + activity.getVerb() );
-            count++;
-        }
+    for( String file : files) {
+      LOGGER.info("File: " + file );
+      InputStream testActivityFileStream = MongoPersistIT.class.getClassLoader()
+          .getResourceAsStream("activities/" + file);
+      Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
+      activity.getAdditionalProperties().remove("$license");
+      StreamsDatum datum = new StreamsDatum(activity, activity.getVerb());
+      writer.write( datum );
+      LOGGER.info("Wrote: " + activity.getVerb() );
+      count++;
+    }
 
-        LOGGER.info("Total Written: {}", count );
+    LOGGER.info("Total Written: {}", count );
 
-        assertEquals( 89, count );
+    assertEquals( 89, count );
 
-        writer.cleanUp();
+    writer.cleanUp();
 
-        MongoPersistReader reader = new MongoPersistReader(testConfiguration);
+    MongoPersistReader reader = new MongoPersistReader(testConfiguration);
 
-        reader.prepare(null);
+    reader.prepare(null);
 
-        StreamsResultSet resultSet = reader.readAll();
+    StreamsResultSet resultSet = reader.readAll();
 
-        LOGGER.info("Total Read: {}", resultSet.size() );
+    LOGGER.info("Total Read: {}", resultSet.size() );
 
-        assertEquals( 89, resultSet.size() );
+    assertEquals( 89, resultSet.size() );
 
-    }
+  }
 }


[11/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorMojoIT.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorMojoIT.java b/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorMojoIT.java
index f2ccd2a..a1d48cc 100644
--- a/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorMojoIT.java
+++ b/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorMojoIT.java
@@ -41,46 +41,45 @@ import static org.apache.streams.plugins.test.StreamsPojoSourceGeneratorTest.jav
  */
 public class StreamsPojoSourceGeneratorMojoIT extends TestCase {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsPojoSourceGeneratorMojoIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsPojoSourceGeneratorMojoIT.class);
 
-    protected void setUp() throws Exception
-    {
-        // required for mojo lookups to work
-        super.setUp();
-    }
+  protected void setUp() throws Exception {
+    // required for mojo lookups to work
+    super.setUp();
+  }
 
 
-    @Test
-    public void testStreamsPojoSourceGeneratorMojo() throws Exception {
+  @Test
+  public void testStreamsPojoSourceGeneratorMojo() throws Exception {
 
-        File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-pojo" );
+    File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-pojo" );
 
-        Verifier verifier;
+    Verifier verifier;
 
-        verifier = new Verifier( testDir.getAbsolutePath() );
+    verifier = new Verifier( testDir.getAbsolutePath() );
 
-        List cliOptions = new ArrayList();
-        cliOptions.add( "-N" );
-        verifier.executeGoals( Lists.<String>newArrayList(
-                "clean",
-                "dependency:unpack-dependencies",
-                "generate-sources",
-                "compile"));
+    List cliOptions = new ArrayList();
+    cliOptions.add( "-N" );
+    verifier.executeGoals( Lists.<String>newArrayList(
+        "clean",
+        "dependency:unpack-dependencies",
+        "generate-sources",
+        "compile"));
 
-        verifier.verifyErrorFreeLog();
+    verifier.verifyErrorFreeLog();
 
-        verifier.resetStreams();
+    verifier.resetStreams();
 
-        File testOutput = new File(testDir.getAbsolutePath() + "/target/generated-sources/pojo-mojo");
+    File testOutput = new File(testDir.getAbsolutePath() + "/target/generated-sources/pojo-mojo");
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(javaFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() > 133 );
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(javaFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() > 133 );
 
-    }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorTest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorTest.java b/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorTest.java
index 7e04e3e..a16c1da 100644
--- a/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorTest.java
+++ b/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorTest.java
@@ -19,72 +19,75 @@
 
 package org.apache.streams.plugins.test;
 
+import org.apache.streams.plugins.StreamsPojoGenerationConfig;
+import org.apache.streams.plugins.StreamsPojoSourceGenerator;
+
 import com.google.common.base.Predicate;
 import com.google.common.collect.Lists;
 import com.google.common.io.Files;
-import org.apache.streams.plugins.StreamsPojoGenerationConfig;
-import org.apache.streams.plugins.StreamsPojoSourceGenerator;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nullable;
 import java.io.File;
-import java.io.FileFilter;
 import java.util.Collection;
 import java.util.List;
+import javax.annotation.Nullable;
 
 /**
- * Test that Activity beans are compatible with the example activities in the spec.
+ * Tests that StreamsPojoSourceGenerator via SDK generates java sources.
+ *
+ * See individual test methods for declared exceptions.
  */
 public class StreamsPojoSourceGeneratorTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsPojoSourceGeneratorTest.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsPojoSourceGeneratorTest.class);
 
-    public static final Predicate<File> javaFilter = new Predicate<File>() {
-        @Override
-        public boolean apply(@Nullable File file) {
-            if( file.getName().endsWith(".java") )
-                return true;
-            else return false;
-        }
-    };
+  public static final Predicate<File> javaFilter = new Predicate<File>() {
+    @Override
+    public boolean apply(@Nullable File file) {
+      if ( file.getName().endsWith(".java") ) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+  };
 
-    /**
-     * Tests that all example activities can be loaded into Activity beans
-     *
-     * @throws Exception
-     */
-    @Test
-    public void testStreamsPojoSourceGenerator() throws Exception {
+  /**
+   * Tests that StreamsPojoSourceGenerator via SDK generates java sources.
+   *
+   * @throws Exception Exception
+   */
+  @Test
+  public void testStreamsPojoSourceGenerator() throws Exception {
 
-        StreamsPojoGenerationConfig config = new StreamsPojoGenerationConfig();
+    StreamsPojoGenerationConfig config = new StreamsPojoGenerationConfig();
 
-        List<String> sourcePaths = Lists.newArrayList(
-            "target/test-classes/activitystreams-schemas/activity.json",
-            "target/test-classes/activitystreams-schemas/collection.json",
-            "target/test-classes/activitystreams-schemas/media_link.json",
-            "target/test-classes/activitystreams-schemas/object.json",
-            "target/test-classes/activitystreams-schemas/objectTypes",
-            "target/test-classes/activitystreams-schemas/verbs"
-        );
-        config.setSourcePaths(sourcePaths);
+    List<String> sourcePaths = Lists.newArrayList(
+        "target/test-classes/activitystreams-schemas/activity.json",
+        "target/test-classes/activitystreams-schemas/collection.json",
+        "target/test-classes/activitystreams-schemas/media_link.json",
+        "target/test-classes/activitystreams-schemas/object.json",
+        "target/test-classes/activitystreams-schemas/objectTypes",
+        "target/test-classes/activitystreams-schemas/verbs"
+    );
+    config.setSourcePaths(sourcePaths);
 
-        config.setTargetPackage("org.apache.streams.pojo");
-        config.setTargetDirectory("target/generated-sources/pojo");
+    config.setTargetPackage("org.apache.streams.pojo");
+    config.setTargetDirectory("target/generated-sources/pojo");
 
-        StreamsPojoSourceGenerator streamsPojoSourceGenerator = new StreamsPojoSourceGenerator(config);
-        streamsPojoSourceGenerator.run();
+    StreamsPojoSourceGenerator streamsPojoSourceGenerator = new StreamsPojoSourceGenerator(config);
+    streamsPojoSourceGenerator.run();
 
-        assert( config.getTargetDirectory() != null );
-        assert( config.getTargetDirectory().exists() == true );
-        assert( config.getTargetDirectory().isDirectory() == true );
+    assert ( config.getTargetDirectory() != null );
+    assert ( config.getTargetDirectory().exists() == true );
+    assert ( config.getTargetDirectory().isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(config.getTargetDirectory())
-                .filter(javaFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() > 133 );
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(config.getTargetDirectory())
+        .filter(javaFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() > 133 );
 
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaGenerationConfig.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaGenerationConfig.java b/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaGenerationConfig.java
index 18ae551..f6ee814 100644
--- a/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaGenerationConfig.java
+++ b/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaGenerationConfig.java
@@ -19,47 +19,40 @@
 
 package org.apache.streams.plugins;
 
-import org.jsonschema2pojo.DefaultGenerationConfig;
-import org.jsonschema2pojo.util.URLUtil;
-
 import java.io.File;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
 import java.util.List;
 
 /**
- * Created by sblackmon on 3/27/16.
+ * Configures StreamsScalaSourceGenerator.
  */
 public class StreamsScalaGenerationConfig {
 
-    private List<String> sourcePackages;
-    private String targetPackage;
-    private String targetDirectory;
+  private List<String> sourcePackages;
+  private String targetPackage;
+  private String targetDirectory;
 
-    public void setSourcePackages(List<String> sourcePackages) {
-        this.sourcePackages = sourcePackages;
-    }
+  public void setSourcePackages(List<String> sourcePackages) {
+    this.sourcePackages = sourcePackages;
+  }
 
-    public List<String> getSourcePackages() {
-        return sourcePackages;
-    }
+  public List<String> getSourcePackages() {
+    return sourcePackages;
+  }
 
-    public void setTargetPackage(String targetPackage) {
-        this.targetPackage = targetPackage;
-    }
+  public void setTargetPackage(String targetPackage) {
+    this.targetPackage = targetPackage;
+  }
 
-    public void setTargetDirectory(String targetDirectory) {
-        this.targetDirectory = targetDirectory;
-    }
+  public void setTargetDirectory(String targetDirectory) {
+    this.targetDirectory = targetDirectory;
+  }
 
-    public String getTargetPackage() {
-        return targetPackage;
-    }
+  public String getTargetPackage() {
+    return targetPackage;
+  }
 
-    public File getTargetDirectory() {
-        return new File(targetDirectory);
-    }
+  public File getTargetDirectory() {
+    return new File(targetDirectory);
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaSourceGenerator.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaSourceGenerator.java b/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaSourceGenerator.java
index 4891c0d..ce9b766 100644
--- a/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaSourceGenerator.java
+++ b/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaSourceGenerator.java
@@ -20,7 +20,6 @@
 package org.apache.streams.plugins;
 
 import com.google.common.base.Splitter;
-import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import org.reflections.ReflectionUtils;
@@ -43,337 +42,389 @@ import java.util.Map;
 import java.util.Set;
 
 /**
- * Created by sblackmon on 11/18/15.
+ * Embed within your own java code.
+ *
+ * <p/>
+ * StreamsScalaGenerationConfig config = new StreamsScalaGenerationConfig();
+ * config.setTargetDirectory("target/generated-sources/scala");
+ * config.setTargetPackage("com.example");
+ * StreamsScalaSourceGenerator generator = new StreamsScalaSourceGenerator(config);
+ * generator.run();
+ *
  */
 public class StreamsScalaSourceGenerator implements Runnable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsScalaSourceGenerator.class);
-
-    private final static String LS = System.getProperty("line.separator");
-
-    private StreamsScalaGenerationConfig config;
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsScalaSourceGenerator.class);
 
-    private Reflections reflections;
+  private static final String LS = System.getProperty("line.separator");
 
-    private String outDir;
+  private StreamsScalaGenerationConfig config;
 
-    public static void main(String[] args) {
-        StreamsScalaGenerationConfig config = new StreamsScalaGenerationConfig();
+  private Reflections reflections;
 
-        List<String> sourcePackages = Lists.newArrayList();
-        String targetDirectory = "target/generated-sources/pojo";
-        String targetPackage = "";
+  private String outDir;
 
-        if( args.length > 0 )
-            sourcePackages = Splitter.on(',').splitToList(args[0]);
-        if( args.length > 1 )
-            targetDirectory = args[1];
-        if( args.length > 2 )
-            targetPackage = args[2];
+  /**
+   * Run from CLI without Maven
+   *
+   * <p/>
+   * java -jar streams-plugin-scala-jar-with-dependencies.jar StreamsScalaSourceGenerator target/generated-sources
+   *
+   * @param args [sourcePackages, targetDirectory, targetPackage]
+   * */
+  public static void main(String[] args) {
+    StreamsScalaGenerationConfig config = new StreamsScalaGenerationConfig();
 
-        config.setSourcePackages(sourcePackages);
-        config.setTargetPackage(targetPackage);
-        config.setTargetDirectory(targetDirectory);
+    List<String> sourcePackages = Lists.newArrayList();
+    String targetDirectory = "target/generated-sources/pojo";
+    String targetPackage = "";
 
-        StreamsScalaSourceGenerator streamsScalaSourceGenerator = new StreamsScalaSourceGenerator(config);
-        streamsScalaSourceGenerator.run();
+    if ( args.length > 0 ) {
+      sourcePackages = Splitter.on(',').splitToList(args[0]);
     }
-
-    public StreamsScalaSourceGenerator(StreamsScalaGenerationConfig config) {
-        this.config = config;
-        this.outDir = config.getTargetDirectory().getAbsolutePath();
-        reflections = new Reflections(
-                new ConfigurationBuilder()
-                        // TODO
-                        .forPackages(
-                                config.getSourcePackages()
-                                        .toArray(new String[config.getSourcePackages().size()])
-                        )
-                        .setScanners(
-                                new SubTypesScanner(),
-                                new TypeAnnotationsScanner()));
-
+    if ( args.length > 1 ) {
+      targetDirectory = args[1];
     }
-
-    public void run() {
-
-        List<Class<?>> serializableClasses = detectSerializableClasses();
-
-        LOGGER.info("Detected {} serialiables:", serializableClasses.size());
-        for( Class clazz : serializableClasses )
-            LOGGER.debug(clazz.toString());
-
-        List<Class<?>> pojoClasses = detectPojoClasses(serializableClasses);
-
-        LOGGER.info("Detected {} pojos:", pojoClasses.size());
-        for( Class clazz : pojoClasses )
-            LOGGER.debug(clazz.toString());
-
-        List<Class<?>> traits = detectTraits(pojoClasses);
-
-        LOGGER.info("Detected {} traits:", traits.size());
-        for( Class clazz : traits )
-            LOGGER.debug(clazz.toString());
-
-        List<Class<?>> cases = detectCases(pojoClasses);
-
-        LOGGER.info("Detected {} cases:", cases.size());
-        for( Class clazz : cases )
-            LOGGER.debug(clazz.toString());
-
-
-        for( Class clazz : traits ) {
-            String pojoPath = clazz.getPackage().getName().replace(".pojo.json", ".scala").replace(".","/")+"/traits/";
-            String pojoName = clazz.getSimpleName()+".scala";
-            String pojoScala = renderTrait(clazz);
-            writeFile(outDir+"/"+pojoPath+pojoName, pojoScala);
-        }
-
-        for( Class clazz : traits ) {
-            String pojoPath = clazz.getPackage().getName().replace(".pojo.json", ".scala").replace(".","/")+"/";
-            String pojoName = clazz.getSimpleName()+".scala";
-            String pojoScala = renderClass(clazz);
-            writeFile(outDir+"/"+pojoPath+pojoName, pojoScala);
-        }
-
-        for( Class clazz : cases ) {
-            String pojoPath = clazz.getPackage().getName().replace(".pojo.json", ".scala").replace(".","/")+"/";
-            String pojoName = clazz.getSimpleName()+".scala";
-            String pojoScala = renderCase(clazz);
-            writeFile(outDir+"/"+pojoPath+pojoName, pojoScala);
-        }
-
+    if ( args.length > 2 ) {
+      targetPackage = args[2];
     }
 
-    private void writeFile(String pojoFile, String pojoScala) {
-        try {
-            File path = new File(pojoFile);
-            File dir = path.getParentFile();
-            if( !dir.exists() )
-                dir.mkdirs();
-            Files.write(Paths.get(pojoFile), pojoScala.getBytes(), StandardOpenOption.CREATE_NEW);
-        } catch (Exception e) {
-            LOGGER.error("Write Exception: {}", e);
-        }
+    config.setSourcePackages(sourcePackages);
+    config.setTargetPackage(targetPackage);
+    config.setTargetDirectory(targetDirectory);
+
+    StreamsScalaSourceGenerator streamsScalaSourceGenerator = new StreamsScalaSourceGenerator(config);
+    streamsScalaSourceGenerator.run();
+  }
+
+  /**
+   * StreamsScalaSourceGenerator constructor.
+   * @param config StreamsScalaGenerationConfig
+   */
+  public StreamsScalaSourceGenerator(StreamsScalaGenerationConfig config) {
+    this.config = config;
+    this.outDir = config.getTargetDirectory().getAbsolutePath();
+    reflections = new Reflections(
+        new ConfigurationBuilder()
+            // TODO
+            .forPackages(
+                config.getSourcePackages()
+                    .toArray(new String[config.getSourcePackages().size()])
+            )
+            .setScanners(
+                new SubTypesScanner(),
+                new TypeAnnotationsScanner()));
+
+  }
+
+  @Override
+  public void run() {
+
+    List<Class<?>> serializableClasses = detectSerializableClasses();
+
+    LOGGER.info("Detected {} serialiables:", serializableClasses.size());
+    for ( Class clazz : serializableClasses ) {
+      LOGGER.debug(clazz.toString());
     }
 
-    public List<Class<?>> detectSerializableClasses() {
+    List<Class<?>> pojoClasses = detectPojoClasses(serializableClasses);
 
-        Set<Class<? extends Serializable>> classes =
-                reflections.getSubTypesOf(java.io.Serializable.class);
-
-        List<Class<?>> result = Lists.newArrayList();
-
-        for( Class clazz : classes ) {
-            result.add(clazz);
-        }
-
-        return result;
+    LOGGER.info("Detected {} pojos:", pojoClasses.size());
+    for ( Class clazz : pojoClasses ) {
+      LOGGER.debug(clazz.toString());
     }
 
-    public List<Class<?>> detectPojoClasses(List<Class<?>> classes) {
+    List<Class<?>> traits = detectTraits(pojoClasses);
 
-        List<Class<?>> result = Lists.newArrayList();
+    LOGGER.info("Detected {} traits:", traits.size());
+    for ( Class clazz : traits ) {
+      LOGGER.debug(clazz.toString());
+    }
 
-        for( Class clazz : classes ) {
-            try {
-                clazz.newInstance().toString();
-            } catch( Exception e) {}
-            // super-halfass way to know if this is a jsonschema2pojo
-            if( clazz.getAnnotations().length >= 1 )
-                result.add(clazz);
-        }
+    List<Class<?>> cases = detectCases(pojoClasses);
 
-        return result;
+    LOGGER.info("Detected {} cases:", cases.size());
+    for ( Class clazz : cases ) {
+      LOGGER.debug(clazz.toString());
     }
 
-    public List<Class<?>> detectTraits(List<Class<?>> classes) {
+    for ( Class clazz : traits ) {
+      String pojoPath = clazz.getPackage().getName().replace(".pojo.json", ".scala").replace(".","/") + "/traits/";
+      String pojoName = clazz.getSimpleName() + ".scala";
+      String pojoScala = renderTrait(clazz);
+      writeFile(outDir + "/" + pojoPath + pojoName, pojoScala);
+    }
 
-        List<Class<?>> traits = Lists.newArrayList();
+    for ( Class clazz : traits ) {
+      String pojoPath = clazz.getPackage().getName().replace(".pojo.json", ".scala").replace(".","/") + "/";
+      String pojoName = clazz.getSimpleName() + ".scala";
+      String pojoScala = renderClass(clazz);
+      writeFile(outDir + "/" + pojoPath + pojoName, pojoScala);
+    }
 
-        for( Class clazz : classes ) {
-            if (reflections.getSubTypesOf(clazz).size() > 0)
-                traits.add(clazz);
-        }
+    for ( Class clazz : cases ) {
+      String pojoPath = clazz.getPackage().getName().replace(".pojo.json", ".scala").replace(".","/") + "/";
+      String pojoName = clazz.getSimpleName() + ".scala";
+      String pojoScala = renderCase(clazz);
+      writeFile(outDir + "/" + pojoPath + pojoName, pojoScala);
+    }
 
-        return traits;
+  }
+
+  private void writeFile(String pojoFile, String pojoScala) {
+    try {
+      File path = new File(pojoFile);
+      File dir = path.getParentFile();
+      if ( !dir.exists() ) {
+        dir.mkdirs();
+      }
+      Files.write(Paths.get(pojoFile), pojoScala.getBytes(), StandardOpenOption.CREATE_NEW);
+    } catch (Exception ex) {
+      LOGGER.error("Write Exception: {}", ex);
     }
+  }
 
-    public List<Class<?>> detectCases(List<Class<?>> classes) {
+  /**
+   * detectSerializableClasses.
+   * @return List of Serializable Classes
+   */
+  public List<Class<?>> detectSerializableClasses() {
 
-        List<Class<?>> cases = Lists.newArrayList();
+    Set<Class<? extends Serializable>> classes =
+        reflections.getSubTypesOf(java.io.Serializable.class);
 
-        for( Class clazz : classes ) {
-            if (reflections.getSubTypesOf(clazz).size() == 0)
-                cases.add(clazz);
-        }
+    List<Class<?>> result = Lists.newArrayList();
 
-        return cases;
+    for ( Class clazz : classes ) {
+      result.add(clazz);
     }
 
+    return result;
+  }
+
+  /**
+   * detect which Classes are Pojo Classes.
+   * @param classes List of candidate Pojo Classes
+   * @return List of actual Pojo Classes
+   */
+  public List<Class<?>> detectPojoClasses(List<Class<?>> classes) {
+
+    List<Class<?>> result = Lists.newArrayList();
+
+    for ( Class clazz : classes ) {
+      try {
+        clazz.newInstance().toString();
+      } catch ( Exception ex) {
+        //
+      }
+      // super-halfass way to know if this is a jsonschema2pojo
+      if ( clazz.getAnnotations().length >= 1 ) {
+        result.add(clazz);
+      }
+    }
 
-    public String renderTrait(Class<?> pojoClass) {
-        StringBuffer stringBuffer = new StringBuffer();
-        stringBuffer.append("package ");
-        stringBuffer.append(pojoClass.getPackage().getName().replace(".pojo.json", ".scala"));
-        stringBuffer.append(".traits");
-        stringBuffer.append(LS);
-        stringBuffer.append("trait "+pojoClass.getSimpleName());
-        stringBuffer.append(" extends Serializable");
-        stringBuffer.append(" {");
+    return result;
+  }
 
-        Set<Field> fields = ReflectionUtils.getAllFields(pojoClass);
-        appendFields(stringBuffer, fields, "def", ";");
+  private List<Class<?>> detectTraits(List<Class<?>> classes) {
 
-        stringBuffer.append("}");
+    List<Class<?>> traits = Lists.newArrayList();
 
-        return stringBuffer.toString();
+    for ( Class clazz : classes ) {
+      if (reflections.getSubTypesOf(clazz).size() > 0) {
+        traits.add(clazz);
+      }
     }
 
-    public String renderClass(Class<?> pojoClass) {
-        StringBuffer stringBuffer = new StringBuffer();
-        stringBuffer.append("package ");
-        stringBuffer.append(pojoClass.getPackage().getName().replace(".pojo.json", ".scala"));
-        stringBuffer.append(LS);
-        stringBuffer.append("import org.apache.commons.lang.builder.{HashCodeBuilder, EqualsBuilder, ToStringBuilder}");
-        stringBuffer.append(LS);
-        stringBuffer.append("class "+pojoClass.getSimpleName());
-        stringBuffer.append(" (");
+    return traits;
+  }
 
-        Set<Field> fields = ReflectionUtils.getAllFields(pojoClass);
-        appendFields(stringBuffer, fields, "var", ",");
+  private List<Class<?>> detectCases(List<Class<?>> classes) {
 
-        stringBuffer.append(")");
-        stringBuffer.append(" extends "+pojoClass.getPackage().getName().replace(".pojo.json", ".scala")+".traits."+pojoClass.getSimpleName());
-        stringBuffer.append(" with Serializable ");
-        stringBuffer.append("{ ");
-        stringBuffer.append(LS);
-        stringBuffer.append("override def equals(obj: Any) = obj match { ");
-        stringBuffer.append(LS);
-        stringBuffer.append("  case other: ");
-        stringBuffer.append(pojoClass.getSimpleName());
-        stringBuffer.append(" => other.getClass == getClass && EqualsBuilder.reflectionEquals(this,obj)");
-        stringBuffer.append(LS);
-        stringBuffer.append("  case _ => false");
-        stringBuffer.append(LS);
-        stringBuffer.append("}");
-        stringBuffer.append(LS);
-        stringBuffer.append("override def hashCode = new HashCodeBuilder().hashCode");
-        stringBuffer.append(LS);
-        stringBuffer.append("}");
+    List<Class<?>> cases = Lists.newArrayList();
 
-        return stringBuffer.toString();
+    for ( Class clazz : classes ) {
+      if (reflections.getSubTypesOf(clazz).size() == 0) {
+        cases.add(clazz);
+      }
     }
 
-    public String renderCase(Class<?> pojoClass) {
-        StringBuffer stringBuffer = new StringBuffer();
-        stringBuffer.append("package ");
-        stringBuffer.append(pojoClass.getPackage().getName().replace(".pojo.json", ".scala"));
-        stringBuffer.append(LS);
-        stringBuffer.append("case class "+pojoClass.getSimpleName());
-        stringBuffer.append("(");
-        Set<Field> fields = ReflectionUtils.getAllFields(pojoClass);
-        appendFields(stringBuffer, fields, "var", ",");
-        stringBuffer.append(")");
-        if( pojoClass.getSuperclass() != null && !pojoClass.getSuperclass().equals(java.lang.Object.class)) {
-            stringBuffer.append(" extends "+pojoClass.getSuperclass().getPackage().getName().replace(".pojo.json", ".scala")+".traits."+pojoClass.getSuperclass().getSimpleName());
-        }
-        stringBuffer.append(LS);
-
-        return stringBuffer.toString();
+    return cases;
+  }
+
+  private String renderTrait(Class<?> pojoClass) {
+    StringBuffer stringBuffer = new StringBuffer();
+    stringBuffer.append("package ");
+    stringBuffer.append(pojoClass.getPackage().getName().replace(".pojo.json", ".scala"));
+    stringBuffer.append(".traits");
+    stringBuffer.append(LS);
+    stringBuffer.append("trait " + pojoClass.getSimpleName());
+    stringBuffer.append(" extends Serializable");
+    stringBuffer.append(" {");
+
+    Set<Field> fields = ReflectionUtils.getAllFields(pojoClass);
+    appendFields(stringBuffer, fields, "def", ";");
+
+    stringBuffer.append("}");
+
+    return stringBuffer.toString();
+  }
+
+  private String renderClass(Class<?> pojoClass) {
+    StringBuffer stringBuffer = new StringBuffer();
+    stringBuffer.append("package ");
+    stringBuffer.append(pojoClass.getPackage().getName().replace(".pojo.json", ".scala"));
+    stringBuffer.append(LS);
+    stringBuffer.append("import org.apache.commons.lang.builder.{HashCodeBuilder, EqualsBuilder, ToStringBuilder}");
+    stringBuffer.append(LS);
+    stringBuffer.append("class " + pojoClass.getSimpleName());
+    stringBuffer.append(" (");
+
+    Set<Field> fields = ReflectionUtils.getAllFields(pojoClass);
+    appendFields(stringBuffer, fields, "var", ",");
+
+    stringBuffer.append(")");
+    stringBuffer.append(" extends " + pojoClass.getPackage().getName().replace(".pojo.json", ".scala") + ".traits." + pojoClass.getSimpleName());
+    stringBuffer.append(" with Serializable ");
+    stringBuffer.append("{ ");
+    stringBuffer.append(LS);
+    stringBuffer.append("override def equals(obj: Any) = obj match { ");
+    stringBuffer.append(LS);
+    stringBuffer.append("  case other: ");
+    stringBuffer.append(pojoClass.getSimpleName());
+    stringBuffer.append(" => other.getClass == getClass && EqualsBuilder.reflectionEquals(this,obj)");
+    stringBuffer.append(LS);
+    stringBuffer.append("  case _ => false");
+    stringBuffer.append(LS);
+    stringBuffer.append("}");
+    stringBuffer.append(LS);
+    stringBuffer.append("override def hashCode = new HashCodeBuilder().hashCode");
+    stringBuffer.append(LS);
+    stringBuffer.append("}");
+
+    return stringBuffer.toString();
+  }
+
+  private String renderCase(Class<?> pojoClass) {
+    StringBuffer stringBuffer = new StringBuffer();
+    stringBuffer.append("package ");
+    stringBuffer.append(pojoClass.getPackage().getName().replace(".pojo.json", ".scala"));
+    stringBuffer.append(LS);
+    stringBuffer.append("case class " + pojoClass.getSimpleName());
+    stringBuffer.append("(");
+    Set<Field> fields = ReflectionUtils.getAllFields(pojoClass);
+    appendFields(stringBuffer, fields, "var", ",");
+    stringBuffer.append(")");
+    if ( pojoClass.getSuperclass() != null && !pojoClass.getSuperclass().equals(java.lang.Object.class)) {
+      stringBuffer.append(" extends " + pojoClass.getSuperclass().getPackage().getName().replace(".pojo.json", ".scala") + ".traits." + pojoClass.getSuperclass().getSimpleName());
     }
-
-    private void appendFields(StringBuffer stringBuffer, Set<Field> fields, String varDef, String fieldDelimiter) {
-        if( fields.size() > 0 ) {
-            stringBuffer.append(LS);
-            Map<String,Field> fieldsToAppend = uniqueFields(fields);
-            for( Iterator<Field> iter = fieldsToAppend.values().iterator(); iter.hasNext(); ) {
-                Field field = iter.next();
-                if( override( field ) )
-                    stringBuffer.append("override ");
-                stringBuffer.append(varDef);
-                stringBuffer.append(" ");
-                stringBuffer.append(name(field));
-                stringBuffer.append(": ");
-                if( option(field) ) {
-                    stringBuffer.append("scala.Option[");
-                    stringBuffer.append(type(field));
-                    stringBuffer.append("]");
-                } else {
-                    stringBuffer.append(type(field));
-                }
-                if( !fieldDelimiter.equals(";") && value(field) != null) {
-                    stringBuffer.append(" = ");
-                    if( option(field) ) {
-                        stringBuffer.append("scala.Some(");
-                        stringBuffer.append(value(field));
-                        stringBuffer.append(")");
-                    } else {
-                        stringBuffer.append(value(field));
-                    }
-                }
-                if( iter.hasNext()) stringBuffer.append(fieldDelimiter);
-                stringBuffer.append(LS);
-            }
+    stringBuffer.append(LS);
+
+    return stringBuffer.toString();
+  }
+
+  private void appendFields(StringBuffer stringBuffer, Set<Field> fields, String varDef, String fieldDelimiter) {
+    if ( fields.size() > 0 ) {
+      stringBuffer.append(LS);
+      Map<String,Field> fieldsToAppend = uniqueFields(fields);
+      for ( Iterator<Field> iter = fieldsToAppend.values().iterator(); iter.hasNext(); ) {
+        Field field = iter.next();
+        if ( override( field ) ) {
+          stringBuffer.append("override ");
+        }
+        stringBuffer.append(varDef);
+        stringBuffer.append(" ");
+        stringBuffer.append(name(field));
+        stringBuffer.append(": ");
+        if ( option(field) ) {
+          stringBuffer.append("scala.Option[");
+          stringBuffer.append(type(field));
+          stringBuffer.append("]");
         } else {
-            stringBuffer.append(LS);
+          stringBuffer.append(type(field));
+        }
+        if ( !fieldDelimiter.equals(";") && value(field) != null) {
+          stringBuffer.append(" = ");
+          if ( option(field) ) {
+            stringBuffer.append("scala.Some(");
+            stringBuffer.append(value(field));
+            stringBuffer.append(")");
+          } else {
+            stringBuffer.append(value(field));
+          }
         }
+        if ( iter.hasNext()) {
+          stringBuffer.append(fieldDelimiter);
+        }
+        stringBuffer.append(LS);
+      }
+    } else {
+      stringBuffer.append(LS);
     }
-
-    private boolean option(Field field) {
-        if( field.getName().equals("verb")) {
-            return false;
-        } else if( field.getType().equals(java.util.Map.class)) {
-            return false;
-        } else if( field.getType().equals(java.util.List.class)) {
-            return false;
-        } else return true;
+  }
+
+  private boolean option(Field field) {
+    if ( field.getName().equals("verb")) {
+      return false;
+    } else if ( field.getType().equals(java.util.Map.class)) {
+      return false;
+    } else if ( field.getType().equals(java.util.List.class)) {
+      return false;
+    } else {
+      return true;
     }
-
-    private String value(Field field) {
-        if( field.getName().equals("verb")) {
-            return "\"post\"";
-        } else if( field.getName().equals("objectType")) {
-            return "\"application\"";
-        } else return null;
+  }
+
+  private String value(Field field) {
+    if ( field.getName().equals("verb")) {
+      return "\"post\"";
+    } else if ( field.getName().equals("objectType")) {
+      return "\"application\"";
+    } else {
+      return null;
     }
-
-    private String type(Field field) {
-        if( field.getType().equals(java.lang.String.class)) {
-            return "String";
-        } else if( field.getType().equals(java.util.Map.class)) {
-            return "scala.collection.mutable.Map[String,Any]";
-        } else if( field.getType().equals(java.util.List.class)) {
-            return "scala.collection.mutable.MutableList[Any]";
-        }
-        return field.getType().getCanonicalName().replace(".pojo.json", ".scala");
+  }
+
+  private String type(Field field) {
+    if ( field.getType().equals(java.lang.String.class)) {
+      return "String";
+    } else if ( field.getType().equals(java.util.Map.class)) {
+      return "scala.collection.mutable.Map[String,Any]";
+    } else if ( field.getType().equals(java.util.List.class)) {
+      return "scala.collection.mutable.MutableList[Any]";
     }
-
-    private Map<String,Field> uniqueFields(Set<Field> fieldset) {
-        Map<String,Field> fields = Maps.newTreeMap();
-        Field item = null;
-        for( Iterator<Field> it = fieldset.iterator(); it.hasNext(); item = it.next() ) {
-            if( item != null && item.getName() != null ) {
-                Field added = fields.put(item.getName(), item);
-            }
-            // ensure right class will get used
-        }
-        return fields;
+    return field.getType().getCanonicalName().replace(".pojo.json", ".scala");
+  }
+
+  private Map<String,Field> uniqueFields(Set<Field> fieldset) {
+    Map<String,Field> fields = Maps.newTreeMap();
+    Field item = null;
+    for ( Iterator<Field> it = fieldset.iterator(); it.hasNext(); item = it.next() ) {
+      if ( item != null && item.getName() != null ) {
+        Field added = fields.put(item.getName(), item);
+      }
+      // ensure right class will get used
     }
-
-    private String name(Field field) {
-        if( field.getName().equals("object"))
-            return "obj";
-        else return field.getName();
+    return fields;
+  }
+
+  private String name(Field field) {
+    if ( field.getName().equals("object")) {
+      return "obj";
+    } else {
+      return field.getName();
     }
-
-    private boolean override(Field field) {
-        try {
-            if( field.getDeclaringClass().getSuperclass().getField(field.getName()) != null )
-                return true;
-            else return false;
-        } catch( Exception e ) {
-            return false;
-        }
+  }
+
+  private boolean override(Field field) {
+    try {
+      if ( field.getDeclaringClass().getSuperclass().getField(field.getName()) != null ) {
+        return true;
+      } else {
+        return false;
+      }
+    } catch ( Exception ex ) {
+      return false;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaSourceGeneratorMojo.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaSourceGeneratorMojo.java b/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaSourceGeneratorMojo.java
index ae91b36..44c82ac 100644
--- a/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaSourceGeneratorMojo.java
+++ b/streams-plugins/streams-plugin-scala/src/main/java/org/apache/streams/plugins/StreamsScalaSourceGeneratorMojo.java
@@ -20,9 +20,9 @@
 package org.apache.streams.plugins;
 
 import com.google.common.base.Splitter;
-import com.google.common.base.Strings;
 import org.apache.maven.plugin.AbstractMojo;
 import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.plugin.MojoFailureException;
 import org.apache.maven.plugins.annotations.Component;
 import org.apache.maven.plugins.annotations.Execute;
 import org.apache.maven.plugins.annotations.LifecyclePhase;
@@ -34,52 +34,59 @@ import org.slf4j.LoggerFactory;
 
 import java.io.File;
 
-@Mojo(  name = "scala",
-        defaultPhase = LifecyclePhase.GENERATE_SOURCES
-)
-@Execute(   goal = "scala",
-            phase = LifecyclePhase.GENERATE_SOURCES
-)
+@Mojo(
+    name = "scala",
+    defaultPhase = LifecyclePhase.GENERATE_SOURCES
+    )
+@Execute(
+    goal = "scala",
+    phase = LifecyclePhase.GENERATE_SOURCES
+    )
 public class StreamsScalaSourceGeneratorMojo extends AbstractMojo {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsScalaSourceGeneratorMojo.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsScalaSourceGeneratorMojo.class);
 
-    @Component
-    private MavenProject project;
+  @Component
+  private MavenProject project;
 
-//    @Component
-//    private Settings settings;
-//
-//    @Parameter( defaultValue = "${localRepository}", readonly = true, required = true )
-//    protected ArtifactRepository localRepository;
-//
-//    @Parameter( defaultValue = "${plugin}", readonly = true ) // Maven 3 only
-//    private PluginDescriptor plugin;
-//
-    @Parameter( defaultValue = "${project.basedir}", readonly = true )
-    private File basedir;
+  //    @Component
+  //    private Settings settings;
+  //
+  //    @Parameter( defaultValue = "${localRepository}", readonly = true, required = true )
+  //    protected ArtifactRepository localRepository;
+  //
+  //    @Parameter( defaultValue = "${plugin}", readonly = true ) // Maven 3 only
+  //    private PluginDescriptor plugin;
+  //
+  @Parameter( defaultValue = "${project.basedir}", readonly = true )
+  private File basedir;
 
-    @Parameter(defaultValue = "${project.build.directory}", readonly = true)
-    private File target;
+  @Parameter(defaultValue = "${project.build.directory}", readonly = true)
+  private File target;
 
-    @Parameter(defaultValue = "org.apache.streams.pojo.json", readonly = true)
-    private String packages;
+  @Parameter(defaultValue = "org.apache.streams.pojo.json", readonly = true)
+  private String packages;
 
-    public void execute() throws MojoExecutionException {
-        StreamsScalaGenerationConfig config = new StreamsScalaGenerationConfig();
-        config.setSourcePackages(Splitter.on(',').splitToList(packages));
-        config.setTargetDirectory(target.toString());
+  /**
+   * execute StreamsScalaSourceGeneratorMojo.
+   * @throws MojoExecutionException MojoExecutionException
+   * @throws MojoFailureException MojoFailureException
+   */
+  public void execute() throws MojoExecutionException {
+    StreamsScalaGenerationConfig config = new StreamsScalaGenerationConfig();
+    config.setSourcePackages(Splitter.on(',').splitToList(packages));
+    config.setTargetDirectory(target.toString());
 
-        StreamsScalaSourceGenerator streamsScalaSourceGenerator = new StreamsScalaSourceGenerator(config);
+    StreamsScalaSourceGenerator streamsScalaSourceGenerator = new StreamsScalaSourceGenerator(config);
 
-        streamsScalaSourceGenerator.run();
-    }
+    streamsScalaSourceGenerator.run();
+  }
 
-    public File getTarget() {
-        return target;
-    }
+  public File getTarget() {
+    return target;
+  }
 
-    public String getPackages() {
-        return packages;
-    }
+  public String getPackages() {
+    return packages;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorCLITest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorCLITest.java b/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorCLITest.java
index 6394193..0365af4 100644
--- a/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorCLITest.java
+++ b/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorCLITest.java
@@ -19,9 +19,10 @@
 
 package org.apache.streams.plugins.test;
 
+import org.apache.streams.plugins.StreamsScalaSourceGenerator;
+
 import com.google.common.collect.Lists;
 import com.google.common.io.Files;
-import org.apache.streams.plugins.StreamsScalaSourceGenerator;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -33,31 +34,31 @@ import java.util.List;
 import static org.apache.streams.plugins.test.StreamsScalaSourceGeneratorTest.scalaFilter;
 
 /**
- * Created by sblackmon on 5/5/16.
+ * Test whether StreamsScalaSourceGeneratorCLI generates sources.
  */
 public class StreamsScalaSourceGeneratorCLITest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsScalaSourceGeneratorCLITest.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsScalaSourceGeneratorCLITest.class);
 
-    @Test
-    public void testStreamsScalaSourceGeneratorCLI() throws Exception {
+  @Test
+  public void testStreamsScalaSourceGeneratorCLI() throws Exception {
 
-        String sourcePackages = "org.apache.streams.pojo.json";
-        String targetPackage = "org.apache.streams.scala";
-        String targetDirectory = "./target/generated-sources/scala-cli";
+    String sourcePackages = "org.apache.streams.pojo.json";
+    String targetPackage = "org.apache.streams.scala";
+    String targetDirectory = "./target/generated-sources/scala-cli";
 
-        List<String> argsList = Lists.newArrayList(sourcePackages, targetDirectory, targetPackage);
-        StreamsScalaSourceGenerator.main(argsList.toArray(new String[argsList.size()]));
+    List<String> argsList = Lists.newArrayList(sourcePackages, targetDirectory, targetPackage);
+    StreamsScalaSourceGenerator.main(argsList.toArray(new String[argsList.size()]));
 
-        File testOutput = new File(targetDirectory);
+    File testOutput = new File(targetDirectory);
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(scalaFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() > 133 );
-    }
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(scalaFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() > 133 );
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorMojoIT.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorMojoIT.java b/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorMojoIT.java
index bcd988f..a0caecf 100644
--- a/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorMojoIT.java
+++ b/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorMojoIT.java
@@ -41,43 +41,42 @@ import static org.apache.streams.plugins.test.StreamsScalaSourceGeneratorTest.sc
  */
 public class StreamsScalaSourceGeneratorMojoIT extends TestCase {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsScalaSourceGeneratorMojoIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsScalaSourceGeneratorMojoIT.class);
 
-    protected void setUp() throws Exception
-    {
-        // required for mojo lookups to work
-        super.setUp();
-    }
+  protected void setUp() throws Exception {
+    // required for mojo lookups to work
+    super.setUp();
+  }
 
 
-    @Test
-    public void testStreamsScalaSourceGeneratorMojo() throws Exception {
+  @Test
+  public void testStreamsScalaSourceGeneratorMojo() throws Exception {
 
-        File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-scala" );
+    File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-scala" );
 
-        Verifier verifier;
+    Verifier verifier;
 
-        verifier = new Verifier( testDir.getAbsolutePath() );
+    verifier = new Verifier( testDir.getAbsolutePath() );
 
-        List cliOptions = new ArrayList();
-        cliOptions.add( "-N" );
-        verifier.executeGoals( Lists.<String>newArrayList(
-                "compile"));
+    List cliOptions = new ArrayList();
+    cliOptions.add( "-N" );
+    verifier.executeGoals( Lists.<String>newArrayList(
+        "compile"));
 
-        verifier.verifyErrorFreeLog();
+    verifier.verifyErrorFreeLog();
 
-        verifier.resetStreams();
+    verifier.resetStreams();
 
-        File testOutput = new File(testDir.getAbsolutePath() + "/target/generated-sources/scala-mojo");
+    File testOutput = new File(testDir.getAbsolutePath() + "/target/generated-sources/scala-mojo");
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(scalaFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() > 133 );
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(scalaFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() > 133 );
 
-    }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorTest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorTest.java b/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorTest.java
index fda4416..1fafaf4 100644
--- a/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorTest.java
+++ b/streams-plugins/streams-plugin-scala/src/test/java/org/apache/streams/plugins/test/StreamsScalaSourceGeneratorTest.java
@@ -19,78 +19,84 @@
 
 package org.apache.streams.plugins.test;
 
-import com.google.common.base.Predicate;
-import com.google.common.collect.Lists;
 import org.apache.streams.plugins.StreamsScalaGenerationConfig;
 import org.apache.streams.plugins.StreamsScalaSourceGenerator;
+
+import com.google.common.base.Predicate;
+import com.google.common.collect.Lists;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nullable;
 import java.io.File;
 import java.io.FileFilter;
+import javax.annotation.Nullable;
 
 import static junit.framework.TestCase.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
 /**
- * Test that Activity beans are compatible with the example activities in the spec.
+ * Tests that StreamsScalaSourceGenerator via SDK generates scala sources.
  */
 public class StreamsScalaSourceGeneratorTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsScalaSourceGeneratorTest.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsScalaSourceGeneratorTest.class);
 
-    public static final Predicate<File> scalaFilter = new Predicate<File>() {
-        @Override
-        public boolean apply(@Nullable File file) {
-            if( file.getName().endsWith(".scala") )
-                return true;
-            else return false;
-        }
-    };
-    /**
-     * Tests that all example activities can be loaded into Activity beans
-     *
-     * @throws Exception
-     */
-    @Test
-    public void testStreamsScalaSourceGenerator() throws Exception {
+  public static final Predicate<File> scalaFilter = new Predicate<File>() {
+    @Override
+    public boolean apply(@Nullable File file) {
+      if ( file.getName().endsWith(".scala") ) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+  };
 
-        StreamsScalaGenerationConfig streamsScalaGenerationConfig = new StreamsScalaGenerationConfig();
-        streamsScalaGenerationConfig.setSourcePackages(Lists.newArrayList("org.apache.streams.pojo.json"));
-        streamsScalaGenerationConfig.setTargetPackage("org.apache.streams.scala");
-        streamsScalaGenerationConfig.setTargetDirectory("target/generated-sources/scala-test");
+  /**
+   * Tests that StreamsScalaSourceGenerator via SDK generates scala sources.
+   *
+   * @throws Exception Exception
+   */
+  @Test
+  public void testStreamsScalaSourceGenerator() throws Exception {
 
-        StreamsScalaSourceGenerator streamsScalaSourceGenerator = new StreamsScalaSourceGenerator(streamsScalaGenerationConfig);
-        streamsScalaSourceGenerator.run();
+    StreamsScalaGenerationConfig streamsScalaGenerationConfig = new StreamsScalaGenerationConfig();
+    streamsScalaGenerationConfig.setSourcePackages(Lists.newArrayList("org.apache.streams.pojo.json"));
+    streamsScalaGenerationConfig.setTargetPackage("org.apache.streams.scala");
+    streamsScalaGenerationConfig.setTargetDirectory("target/generated-sources/scala-test");
 
-        File testOutput = new File( "./target/generated-sources/scala-test/org/apache/streams/scala");
-        FileFilter scalaFilter = new FileFilter() {
-            @Override
-            public boolean accept(File pathname) {
-                if( pathname.getName().endsWith(".scala") )
-                    return true;
-                return false;
-            }
-        };
+    StreamsScalaSourceGenerator streamsScalaSourceGenerator = new StreamsScalaSourceGenerator(streamsScalaGenerationConfig);
+    streamsScalaSourceGenerator.run();
 
-        assertNotNull( testOutput );
-        assertTrue( testOutput.exists() );
-        assertTrue( testOutput.isDirectory() );
-        assertEquals( 10, testOutput.listFiles(scalaFilter).length );
-        assertTrue( new File(testOutput + "/traits").exists() );
-        assertTrue( new File(testOutput + "/traits").isDirectory() );
-        assertNotNull( new File(testOutput + "/traits").listFiles(scalaFilter) );
-        assertEquals( 4, new File(testOutput + "/traits").listFiles(scalaFilter).length );
-        assertTrue( new File(testOutput + "/objectTypes").exists() );
-        assertTrue( new File(testOutput + "/objectTypes").isDirectory() );
-        assertNotNull( new File(testOutput + "/objectTypes").listFiles(scalaFilter) );
-        assertEquals( 42, new File(testOutput + "/objectTypes").listFiles(scalaFilter).length);
-        assertTrue( new File(testOutput + "/verbs").exists() );
-        assertTrue( new File(testOutput + "/verbs").isDirectory() );
-        assertNotNull( new File(testOutput + "/verbs").listFiles(scalaFilter) );
-        assertEquals( 89, new File(testOutput + "/verbs").listFiles(scalaFilter).length );
-    }
+    File testOutput = new File( "./target/generated-sources/scala-test/org/apache/streams/scala");
+    FileFilter scalaFilter = new FileFilter() {
+      @Override
+      public boolean accept(File pathname) {
+        if ( pathname.getName().endsWith(".scala") ) {
+          return true;
+        } else {
+          return false;
+        }
+      }
+    };
+
+    assertNotNull( testOutput );
+    assertTrue( testOutput.exists() );
+    assertTrue( testOutput.isDirectory() );
+    assertEquals( 10, testOutput.listFiles(scalaFilter).length );
+    assertTrue( new File(testOutput + "/traits").exists() );
+    assertTrue( new File(testOutput + "/traits").isDirectory() );
+    assertNotNull( new File(testOutput + "/traits").listFiles(scalaFilter) );
+    assertEquals( 4, new File(testOutput + "/traits").listFiles(scalaFilter).length );
+    assertTrue( new File(testOutput + "/objectTypes").exists() );
+    assertTrue( new File(testOutput + "/objectTypes").isDirectory() );
+    assertNotNull( new File(testOutput + "/objectTypes").listFiles(scalaFilter) );
+    assertEquals( 42, new File(testOutput + "/objectTypes").listFiles(scalaFilter).length);
+    assertTrue( new File(testOutput + "/verbs").exists() );
+    assertTrue( new File(testOutput + "/verbs").isDirectory() );
+    assertNotNull( new File(testOutput + "/verbs").listFiles(scalaFilter) );
+    assertEquals( 89, new File(testOutput + "/verbs").listFiles(scalaFilter).length );
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo-extensions/src/main/java/org/apache/streams/data/util/PropertyUtil.java
----------------------------------------------------------------------
diff --git a/streams-pojo-extensions/src/main/java/org/apache/streams/data/util/PropertyUtil.java b/streams-pojo-extensions/src/main/java/org/apache/streams/data/util/PropertyUtil.java
index f5a4b55..827bc6a 100644
--- a/streams-pojo-extensions/src/main/java/org/apache/streams/data/util/PropertyUtil.java
+++ b/streams-pojo-extensions/src/main/java/org/apache/streams/data/util/PropertyUtil.java
@@ -18,17 +18,17 @@
 
 package org.apache.streams.data.util;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.fasterxml.jackson.databind.node.ValueNode;
-import com.google.common.base.Joiner;
 import com.google.common.base.Splitter;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 
 import java.util.Iterator;
 import java.util.List;
@@ -39,87 +39,88 @@ import java.util.Map;
  */
 public class PropertyUtil {
 
-    private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    public static Map<String, Object> flattenToMap(ObjectNode object) {
-        Map<String, Object> flatObject = Maps.newHashMap();
-        addKeys(new String(), object, flatObject, '.');
-        return flatObject;
-    }
-
-    public static ObjectNode flattenToObjectNode(ObjectNode object) {
-        Map<String, Object> flatObject = flattenToMap(object, '.');
-        addKeys(new String(), object, flatObject, '.');
-        return mapper.convertValue(flatObject, ObjectNode.class);
-    }
-
-    public static Map<String, Object> flattenToMap(ObjectNode object, char seperator) {
-        Map<String, Object> flatObject = Maps.newHashMap();
-        addKeys(new String(), object, flatObject, seperator);
-        return flatObject;
+  private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+  public static Map<String, Object> flattenToMap(ObjectNode object) {
+    Map<String, Object> flatObject = Maps.newHashMap();
+    addKeys(new String(), object, flatObject, '.');
+    return flatObject;
+  }
+
+  public static Map<String, Object> flattenToMap(ObjectNode object, char seperator) {
+    Map<String, Object> flatObject = Maps.newHashMap();
+    addKeys(new String(), object, flatObject, seperator);
+    return flatObject;
+  }
+
+  public static ObjectNode flattenToObjectNode(ObjectNode object) {
+    Map<String, Object> flatObject = flattenToMap(object, '.');
+    addKeys(new String(), object, flatObject, '.');
+    return mapper.convertValue(flatObject, ObjectNode.class);
+  }
+
+  public static ObjectNode flattenToObjectNode(ObjectNode object, char seperator) {
+    Map<String, Object> flatObject = flattenToMap(object, seperator);
+    addKeys(new String(), object, flatObject, seperator);
+    return mapper.convertValue(flatObject, ObjectNode.class);
+  }
+
+  private static void addKeys(String currentPath, JsonNode jsonNode, Map<String, Object> map, char seperator) {
+    if (jsonNode.isObject()) {
+      ObjectNode objectNode = (ObjectNode) jsonNode;
+      Iterator<Map.Entry<String, JsonNode>> iter = objectNode.fields();
+      String pathPrefix = currentPath.isEmpty() ? "" : currentPath + seperator;
+
+      while (iter.hasNext()) {
+        Map.Entry<String, JsonNode> entry = iter.next();
+        addKeys(pathPrefix + entry.getKey(), entry.getValue(), map, seperator);
+      }
+    } else if (jsonNode.isArray()) {
+      ArrayNode arrayNode = (ArrayNode) jsonNode;
+      map.put(currentPath, arrayNode);
+    } else if (jsonNode.isValueNode()) {
+      ValueNode valueNode = (ValueNode) jsonNode;
+      if ( valueNode.isTextual() ) {
+        map.put(currentPath, valueNode.asText());
+      } else if ( valueNode.isNumber() ) {
+        map.put(currentPath, valueNode);
+      }
     }
-
-    public static ObjectNode flattenToObjectNode(ObjectNode object, char seperator) {
-        Map<String, Object> flatObject = flattenToMap(object, seperator);
-        addKeys(new String(), object, flatObject, seperator);
-        return mapper.convertValue(flatObject, ObjectNode.class);
-    }
-
-    private static void addKeys(String currentPath, JsonNode jsonNode, Map<String, Object> map, char seperator) {
-        if (jsonNode.isObject()) {
-            ObjectNode objectNode = (ObjectNode) jsonNode;
-            Iterator<Map.Entry<String, JsonNode>> iter = objectNode.fields();
-            String pathPrefix = currentPath.isEmpty() ? "" : currentPath + seperator;
-
-            while (iter.hasNext()) {
-                Map.Entry<String, JsonNode> entry = iter.next();
-                addKeys(pathPrefix + entry.getKey(), entry.getValue(), map, seperator);
-            }
-        } else if (jsonNode.isArray()) {
-            ArrayNode arrayNode = (ArrayNode) jsonNode;
-            map.put(currentPath, arrayNode);
-        } else if (jsonNode.isValueNode()) {
-            ValueNode valueNode = (ValueNode) jsonNode;
-            if( valueNode.isTextual() )
-                map.put(currentPath, valueNode.asText());
-            else if ( valueNode.isNumber() )
-                map.put(currentPath, valueNode);
+  }
+
+  public static ObjectNode unflattenMap(Map<String, Object> object, char seperator) {
+    return unflattenObjectNode(mapper.convertValue(object, ObjectNode.class), seperator);
+  }
+
+  public static ObjectNode unflattenObjectNode(ObjectNode flatObject, char seperator) {
+    ObjectNode root = mapper.createObjectNode();
+    Iterator<Map.Entry<String, JsonNode>> iter = flatObject.fields();
+    while (iter.hasNext()) {
+      Map.Entry<String, JsonNode> item = iter.next();
+      String fullKey = item.getKey();
+      if ( !fullKey.contains(Character.valueOf(seperator).toString())) {
+        root.put(item.getKey(), item.getValue());
+      } else {
+        ObjectNode currentNode = root;
+        List<String> keyParts = Lists.newArrayList();
+        Iterables.addAll(keyParts, Splitter.on(seperator).split(item.getKey()));
+        Iterator<String> keyPartIterator = Iterables.limit(Splitter.on(seperator).split(item.getKey()), keyParts.size() - 1).iterator();
+        while ( keyPartIterator.hasNext()) {
+          String part = keyPartIterator.next();
+          if ( currentNode.has(part) && currentNode.get(part).isObject() ) {
+            currentNode = (ObjectNode) currentNode.get(part);
+          } else {
+            ObjectNode newNode = mapper.createObjectNode();
+            currentNode.put(part, newNode);
+            currentNode = newNode;
+          }
         }
-    }
+        currentNode.put(keyParts.get(keyParts.size() - 1), item.getValue());
 
-    public static ObjectNode unflattenMap(Map<String, Object> object, char seperator) {
-        return unflattenObjectNode(mapper.convertValue(object, ObjectNode.class), seperator);
-    }
-
-    public static ObjectNode unflattenObjectNode(ObjectNode flatObject, char seperator) {
-        ObjectNode root = mapper.createObjectNode();
-        Iterator<Map.Entry<String, JsonNode>> iter = flatObject.fields();
-        while (iter.hasNext()) {
-            Map.Entry<String, JsonNode> item = iter.next();
-            String fullKey = item.getKey();
-            if( !fullKey.contains(Character.valueOf(seperator).toString())) {
-                root.put(item.getKey(), item.getValue());
-            } else {
-                ObjectNode currentNode = root;
-                List<String> keyParts = Lists.newArrayList();
-                Iterables.addAll(keyParts, Splitter.on(seperator).split(item.getKey()));
-                Iterator<String> keyPartIterator = Iterables.limit(Splitter.on(seperator).split(item.getKey()), keyParts.size()-1).iterator();
-                while( keyPartIterator.hasNext()) {
-                    String part = keyPartIterator.next();
-                    if( currentNode.has(part) && currentNode.get(part).isObject() ) {
-                        currentNode = (ObjectNode) currentNode.get(part);
-                    } else {
-                        ObjectNode newNode = mapper.createObjectNode();
-                        currentNode.put(part, newNode);
-                        currentNode = newNode;
-                    }
-                };
-                currentNode.put(keyParts.get(keyParts.size()-1), item.getValue());
-
-            }
-        }
-        return root;
+      }
     }
+    return root;
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo-extensions/src/main/java/org/apache/streams/pojo/extensions/ExtensionUtil.java
----------------------------------------------------------------------
diff --git a/streams-pojo-extensions/src/main/java/org/apache/streams/pojo/extensions/ExtensionUtil.java b/streams-pojo-extensions/src/main/java/org/apache/streams/pojo/extensions/ExtensionUtil.java
index 7fe2c28..988a269 100644
--- a/streams-pojo-extensions/src/main/java/org/apache/streams/pojo/extensions/ExtensionUtil.java
+++ b/streams-pojo-extensions/src/main/java/org/apache/streams/pojo/extensions/ExtensionUtil.java
@@ -18,12 +18,13 @@
 
 package org.apache.streams.pojo.extensions;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Strings;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Strings;
+
 import java.util.HashMap;
 import java.util.Map;
 
@@ -32,133 +33,131 @@ import java.util.Map;
  */
 public class ExtensionUtil {
 
-    public static final String DEFAULT_EXTENSION_PROPERTY = null;
-
-    private static final ExtensionUtil INSTANCE = new ExtensionUtil(DEFAULT_EXTENSION_PROPERTY);
-
-    private String extensionProperty;
-
-    public static ExtensionUtil getInstance(){
-        return INSTANCE;
-    }
-
-    public static ExtensionUtil getInstance(String property){
-        return new ExtensionUtil(property);
-    }
-
-    private ExtensionUtil(String extensionProperty) {
-        this.extensionProperty = extensionProperty;
-    }
-
-    /**
-     * Property on the activity object to use for extensions
-     */
-
-    private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    public Map<String, Object> getExtensions(Activity activity) {
-        return ensureExtensions(activity);
-    }
-
-    public Object getExtension(Activity activity, String key) {
-        Map<String,Object> extensions = ensureExtensions(activity);
-        return extensions.get(key);
-    }
-
-    public void setExtensions(Activity activity, Map<String, Object> extensions) {
-        activity.setAdditionalProperty(extensionProperty, extensions);
-    }
-
-    public void addExtension(Activity activity, String key, Object extension) {
-        Map<String,Object> extensions = ensureExtensions(activity);
-        extensions.put(key, extension);
-    }
-
-    public void addExtensions(Activity activity, Map<String, Object> extensions) {
-        for( Map.Entry<String, Object> item : extensions.entrySet())
-            addExtension(activity, item.getKey(), item.getValue());
-    }
-
-    public void removeExtension(Activity activity, String key) {
-        Map<String,Object> extensions = ensureExtensions(activity);
-        extensions.remove(key);
-    }
-
-    public Map<String, Object> getExtensions(ActivityObject object) {
-        ActivityObject activityObject = mapper.convertValue(object, ActivityObject.class);
-        return ensureExtensions(activityObject);
-    }
-
-    public Object getExtension(ActivityObject object, String key) {
-        Map<String,Object> extensions = ensureExtensions(object);
-        return extensions.get(key);
-    }
-
-    public void setExtensions(ActivityObject object, Map<String, Object> extensions) {
-        object.setAdditionalProperty(extensionProperty, extensions);
-    }
-
-    public void addExtension(ActivityObject object, String key, Object extension) {
-        Map<String,Object> extensions = ensureExtensions(object);
-        extensions.put(key, extension);
-    }
-
-    public void addExtensions(ActivityObject object, Map<String, Object> extensions) {
-        for( Map.Entry<String, Object> item : extensions.entrySet())
-            addExtension(object, item.getKey(), item.getValue());
-    }
-
-    public void removeExtension(ActivityObject object, String key) {
-        Map<String,Object> extensions = ensureExtensions(object);
-        extensions.remove(key);
-    }
-
-    /**
-     * Creates a standard extension property
-     * @param activity activity to create the property in
-     * @return the Map representing the extensions property
-     */
-    @SuppressWarnings("unchecked")
-    public Map<String, Object> ensureExtensions(Activity activity) {
-        Map<String,Object> additionalProperties = activity.getAdditionalProperties();
-        Map<String,Object> extensions;
-        if(additionalProperties == null) {
-            additionalProperties = new HashMap<>();
-        }
-        if( !Strings.isNullOrEmpty(extensionProperty) ) {
-            extensions = (Map<String, Object>) additionalProperties.get(extensionProperty);
-            if(extensions == null) {
-                extensions = new HashMap<>();
-                additionalProperties.put(extensionProperty, extensions);
-            }
-            return extensions;
-        } else {
-            return additionalProperties;
-        }
-    }
-
-    /**
-     * Creates a standard extension property
-     * @param object object node to create the property in
-     * @return {@link Map} representing the extensions property
-     */
-    @SuppressWarnings("unchecked")
-    public Map<String, Object> ensureExtensions(ActivityObject object) {
-        Map<String,Object> additionalProperties = object.getAdditionalProperties();
-        Map<String,Object> extensions;
-        if(additionalProperties == null) {
-            additionalProperties = new HashMap<>();
-        }
-        if( !Strings.isNullOrEmpty(extensionProperty) ) {
-            extensions = (Map<String, Object>) additionalProperties.get(extensionProperty);
-            if(extensions == null) {
-                extensions = new HashMap<>();
-                additionalProperties.put(extensionProperty, extensions);
-            }
-            return extensions;
-        } else {
-            return additionalProperties;
-        }
-    }
+  public static final String DEFAULT_EXTENSION_PROPERTY = null;
+
+  private static final ExtensionUtil INSTANCE = new ExtensionUtil(DEFAULT_EXTENSION_PROPERTY);
+
+  private String extensionProperty;
+
+  public static ExtensionUtil getInstance() {
+    return INSTANCE;
+  }
+
+  public static ExtensionUtil getInstance(String property) {
+    return new ExtensionUtil(property);
+  }
+
+  private ExtensionUtil(String extensionProperty) {
+    this.extensionProperty = extensionProperty;
+  }
+
+  private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+  public Map<String, Object> getExtensions(Activity activity) {
+    return ensureExtensions(activity);
+  }
+
+  public Map<String, Object> getExtensions(ActivityObject object) {
+    ActivityObject activityObject = mapper.convertValue(object, ActivityObject.class);
+    return ensureExtensions(activityObject);
+  }
+
+  public Object getExtension(Activity activity, String key) {
+    Map<String,Object> extensions = ensureExtensions(activity);
+    return extensions.get(key);
+  }
+
+  public Object getExtension(ActivityObject object, String key) {
+    Map<String,Object> extensions = ensureExtensions(object);
+    return extensions.get(key);
+  }
+
+  public void addExtension(Activity activity, String key, Object extension) {
+    Map<String,Object> extensions = ensureExtensions(activity);
+    extensions.put(key, extension);
+  }
+
+  public void addExtension(ActivityObject object, String key, Object extension) {
+    Map<String,Object> extensions = ensureExtensions(object);
+    extensions.put(key, extension);
+  }
+
+  public void removeExtension(Activity activity, String key) {
+    Map<String,Object> extensions = ensureExtensions(activity);
+    extensions.remove(key);
+  }
+
+  public void removeExtension(ActivityObject object, String key) {
+    Map<String,Object> extensions = ensureExtensions(object);
+    extensions.remove(key);
+  }
+
+  public void setExtensions(Activity activity, Map<String, Object> extensions) {
+    activity.setAdditionalProperty(extensionProperty, extensions);
+  }
+
+  public void setExtensions(ActivityObject object, Map<String, Object> extensions) {
+    object.setAdditionalProperty(extensionProperty, extensions);
+  }
+
+  public void addExtensions(Activity activity, Map<String, Object> extensions) {
+    for ( Map.Entry<String, Object> item : extensions.entrySet()) {
+      addExtension(activity, item.getKey(), item.getValue());
+    }
+  }
+
+  public void addExtensions(ActivityObject object, Map<String, Object> extensions) {
+    for ( Map.Entry<String, Object> item : extensions.entrySet()) {
+      addExtension(object, item.getKey(), item.getValue());
+    }
+  }
+
+  /**
+   * Creates a standard extension property.
+   * @param activity activity to create the property in
+   * @return the Map representing the extensions property
+   */
+  @SuppressWarnings("unchecked")
+  public Map<String, Object> ensureExtensions(Activity activity) {
+    Map<String,Object> additionalProperties = activity.getAdditionalProperties();
+    Map<String,Object> extensions;
+    if (additionalProperties == null) {
+      additionalProperties = new HashMap<>();
+    }
+    if ( !Strings.isNullOrEmpty(extensionProperty) ) {
+      extensions = (Map<String, Object>) additionalProperties.get(extensionProperty);
+      if (extensions == null) {
+        extensions = new HashMap<>();
+        additionalProperties.put(extensionProperty, extensions);
+      }
+      return extensions;
+    } else {
+      return additionalProperties;
+    }
+  }
+
+  /**
+   * Creates a standard extension property.
+   * @param object object node to create the property in
+   * @return {@link Map} representing the extensions property
+   */
+  @SuppressWarnings("unchecked")
+  public Map<String, Object> ensureExtensions(ActivityObject object) {
+    Map<String,Object> additionalProperties = object.getAdditionalProperties();
+    Map<String,Object> extensions;
+    if (additionalProperties == null) {
+      additionalProperties = new HashMap<>();
+    }
+    if ( !Strings.isNullOrEmpty(extensionProperty) ) {
+      extensions = (Map<String, Object>) additionalProperties.get(extensionProperty);
+      if (extensions == null) {
+        extensions = new HashMap<>();
+        additionalProperties.put(extensionProperty, extensions);
+      }
+      return extensions;
+    } else {
+      return additionalProperties;
+    }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo-extensions/src/test/java/org/apache/streams/pojo/extensions/test/ExtensionUtilTest.java
----------------------------------------------------------------------
diff --git a/streams-pojo-extensions/src/test/java/org/apache/streams/pojo/extensions/test/ExtensionUtilTest.java b/streams-pojo-extensions/src/test/java/org/apache/streams/pojo/extensions/test/ExtensionUtilTest.java
index 5cce209..de49da4 100644
--- a/streams-pojo-extensions/src/test/java/org/apache/streams/pojo/extensions/test/ExtensionUtilTest.java
+++ b/streams-pojo-extensions/src/test/java/org/apache/streams/pojo/extensions/test/ExtensionUtilTest.java
@@ -18,34 +18,31 @@
 
 package org.apache.streams.pojo.extensions.test;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Strings;
-import com.google.common.collect.Maps;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.extensions.ExtensionUtil;
 import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.pojo.json.ActivityObject;
+
+import com.google.common.base.Strings;
 import org.junit.Test;
 
 import java.util.Map;
 
 /**
- *  Test ExtensionUtil methods
+ * Test ExtensionUtil methods.
  */
 public class ExtensionUtilTest {
 
-    @Test
-    public void testActivitySetCustomExtension() throws Exception {
-        ExtensionUtil customExtensionUtil = ExtensionUtil.getInstance("ext");
-        Activity activity = new Activity();
-        Map<String, Object> extensions = customExtensionUtil.ensureExtensions(activity);
-        String value = "value";
-        extensions.put("extension", value);
-        customExtensionUtil.setExtensions(activity, extensions);
-        assert(!Strings.isNullOrEmpty((String)customExtensionUtil.getExtension(activity, "extension")));
-        extensions = customExtensionUtil.getExtensions(activity);
-        assert(value.equals((String)extensions.get("extension")));
-        assert(activity.getAdditionalProperties().get("ext") != null);
-    }
+  @Test
+  public void testActivitySetCustomExtension() throws Exception {
+    ExtensionUtil customExtensionUtil = ExtensionUtil.getInstance("ext");
+    Activity activity = new Activity();
+    Map<String, Object> extensions = customExtensionUtil.ensureExtensions(activity);
+    String value = "value";
+    extensions.put("extension", value);
+    customExtensionUtil.setExtensions(activity, extensions);
+    assert (!Strings.isNullOrEmpty((String)customExtensionUtil.getExtension(activity, "extension")));
+    extensions = customExtensionUtil.getExtensions(activity);
+    assert (value.equals((String)extensions.get("extension")));
+    assert (activity.getAdditionalProperties().get("ext") != null);
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/data/ActivityConverter.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/data/ActivityConverter.java b/streams-pojo/src/main/java/org/apache/streams/data/ActivityConverter.java
index 4d6d759..70f2135 100644
--- a/streams-pojo/src/main/java/org/apache/streams/data/ActivityConverter.java
+++ b/streams-pojo/src/main/java/org/apache/streams/data/ActivityConverter.java
@@ -27,61 +27,67 @@ import java.util.List;
 /**
  * Converts non-Activity documents to Activities and back.
  *
+ * <p/>
  * Each converter may one, several, or zero activities.
  *
+ * <p/>
  * The recommended approach for deriving multiple activities from a source document is:
  *
+ * <p/>
  *   1) Return one activity for each occurance of a verb, from the same ActivityConverter, if the activities are of like type.
  *
+ * <p/>
  *      For example, BlogShareConverter would convert a blog containing two shares into two Activities with verb: share
  *
+ * <p/>
  *   2) Create multiple ActivityConverters, if the activities are not of like type.
  *
+ * <p/>
  *      For example, a blog post that is both a post and a share should be transformed by two seperate Converters, individually
  *      or simultaneously applied.
  */
 public interface ActivityConverter<T> extends Serializable {
 
-    /**
-     * What class does this ActivityConverter require?
-     *
-     * @return The class the ActivityConverter requires.  Should always return the templated class.
-     */
-    Class requiredClass();
+  /**
+   * What class does this ActivityConverter require?
+   *
+   * @return The class the ActivityConverter requires.  Should always return the templated class.
+   */
+  Class requiredClass();
 
-    /**
-     * Gets the supported content type that can be deserialized/serialized
-     *
-     * @return A string representing the format name.  Can be an IETF MIME type or other
-     */
-    String serializationFormat();
+  /**
+   * Gets the supported content type that can be deserialized/serialized
+   *
+   * @return A string representing the format name.  Can be an IETF MIME type or other
+   */
+  String serializationFormat();
 
-    /**
-     * Converts the activity to a POJO representation.
-     *
-     * @param deserialized the string
-     * @return a fully populated Activity object
-     */
-    T fromActivity(Activity deserialized) throws ActivityConversionException;
+  /**
+   * Converts the activity to a POJO representation.
+   *
+   * @param deserialized the string
+   * @return a fully populated Activity object
+   */
+  T fromActivity(Activity deserialized) throws ActivityConversionException;
 
-    /**
-     * Converts a POJO into one or more Activities
-     * @param serialized the string representation
-     * @return a fully populated Activity object
-     */
-    List<Activity> toActivityList(T serialized) throws ActivityConversionException;
+  /**
+   * Converts multiple Activities into a list of source documents.
+   * @param list a typed List of documents
+   * @return a list of source documents
+   */
+  List<T> fromActivityList(List<Activity> list) throws ActivityConversionException;
 
-    /**
-     * Converts multiple Activities into a list of source documents
-     * @param list a typed List of documents
-     * @return a list of source documents
-     */
-    List<T> fromActivityList(List<Activity> list) throws ActivityConversionException;
+  /**
+   * Converts a POJO into one or more Activities.
+   * @param serialized the string representation
+   * @return a fully populated Activity object
+   */
+  List<Activity> toActivityList(T serialized) throws ActivityConversionException;
 
-    /**
-     * Converts multiple documents into a list of Activity objects
-     * @param list a typed List of documents
-     * @return a list of fully populated activities
-     */
-    List<Activity> toActivityList(List<T> list) throws ActivityConversionException;
+  /**
+   * Converts multiple documents into a list of Activity objects.
+   * @param list a typed List of documents
+   * @return a list of fully populated activities
+   */
+  List<Activity> toActivityList(List<T> list) throws ActivityConversionException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/data/ActivityObjectConverter.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/data/ActivityObjectConverter.java b/streams-pojo/src/main/java/org/apache/streams/data/ActivityObjectConverter.java
index 0d25d91..7e80327 100644
--- a/streams-pojo/src/main/java/org/apache/streams/data/ActivityObjectConverter.java
+++ b/streams-pojo/src/main/java/org/apache/streams/data/ActivityObjectConverter.java
@@ -22,44 +22,42 @@ import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.pojo.json.ActivityObject;
 
 import java.io.Serializable;
-import java.util.List;
 
 /**
  * Converts non-ActivityObject documents to ActivityObjects and back.
  *
+ * <p/>
  * Each converter may return zero or one alternative representations.
- *
  */
-
 public interface ActivityObjectConverter<T> extends Serializable {
 
-    /**
-     * What class does this ActivityConverter require?
-     *
-     * @return The class the ActivityConverter requires.  Should always return the templated class.
-     */
-    Class requiredClass();
-
-    /**
-     * Gets the supported content type that can be deserialized/serialized
-     *
-     * @return A string representing the format name.  Can be an IETF MIME type or other
-     */
-    String serializationFormat();
-
-    /**
-     * Converts the activity to a POJO representation.
-     *
-     * @param deserialized the string
-     * @return a fully populated Activity object
-     */
-    T fromActivityObject(ActivityObject deserialized) throws ActivityConversionException;
-
-    /**
-     * Converts a POJO into an ActivityObject
-     * @param serialized the string representation
-     * @return a fully populated Activity object
-     */
-    ActivityObject toActivityObject(T serialized) throws ActivityConversionException;
+  /**
+   * What class does this ActivityConverter require?
+   *
+   * @return The class the ActivityConverter requires.  Should always return the templated class.
+   */
+  Class requiredClass();
+
+  /**
+   * Gets the supported content type that can be deserialized/serialized.
+   *
+   * @return A string representing the format name.  Can be an IETF MIME type or other
+   */
+  String serializationFormat();
+
+  /**
+   * Converts the activity to a POJO representation.
+   *
+   * @param deserialized the string
+   * @return a fully populated Activity object
+   */
+  T fromActivityObject(ActivityObject deserialized) throws ActivityConversionException;
+
+  /**
+   * Converts a POJO into an ActivityObject.
+   * @param serialized the string representation
+   * @return a fully populated Activity object
+   */
+  ActivityObject toActivityObject(T serialized) throws ActivityConversionException;
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/data/ActivitySerializer.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/data/ActivitySerializer.java b/streams-pojo/src/main/java/org/apache/streams/data/ActivitySerializer.java
index 8ca0c04..3a4282c 100644
--- a/streams-pojo/src/main/java/org/apache/streams/data/ActivitySerializer.java
+++ b/streams-pojo/src/main/java/org/apache/streams/data/ActivitySerializer.java
@@ -26,37 +26,38 @@ import java.util.List;
 /**
  * Serializes and deserializes Activities
  *
+ * <p/>
  * Deprecated: Switch all modules to use {@link org.apache.streams.data.ActivityConverter}
  */
 @Deprecated
 public interface ActivitySerializer<T> {
 
-    /**
-     * Gets the supported content type that can be deserialized/serialized
-     *
-     * @return A string representing the format name.  Can be an IETF MIME type or other
-     */
-    String serializationFormat();
-
-    /**
-     * Converts the activity to a POJO representation.
-     *
-     * @param deserialized the string
-     * @return a fully populated Activity object
-     */
-    T serialize(Activity deserialized) throws ActivitySerializerException;
-
-    /**
-     * Converts a POJO into an Activity
-     * @param serialized the string representation
-     * @return a fully populated Activity object
-     */
-    Activity deserialize(T serialized) throws ActivitySerializerException;
-
-    /**
-     * Converts multiple documents into a list of Activity objects
-     * @param serializedList a typed List of documents
-     * @return a list of fully populated activities
-     */
-    List<Activity> deserializeAll(List<T> serializedList);
+  /**
+   * Gets the supported content type that can be deserialized/serialized.
+   *
+   * @return A string representing the format name.  Can be an IETF MIME type or other
+   */
+  String serializationFormat();
+
+  /**
+   * Converts the activity to a POJO representation.
+   *
+   * @param deserialized the string
+   * @return a fully populated Activity object
+   */
+  T serialize(Activity deserialized) throws ActivitySerializerException;
+
+  /**
+   * Converts a POJO into an Activity.
+   * @param serialized the string representation
+   * @return a fully populated Activity object
+   */
+  Activity deserialize(T serialized) throws ActivitySerializerException;
+
+  /**
+   * Converts multiple documents into a list of Activity objects.
+   * @param serializedList a typed List of documents
+   * @return a list of fully populated activities
+   */
+  List<Activity> deserializeAll(List<T> serializedList);
 }



[33/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/CleanAdditionalPropertiesProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/CleanAdditionalPropertiesProcessor.java b/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/CleanAdditionalPropertiesProcessor.java
index 6c08eb1..ae0709a 100644
--- a/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/CleanAdditionalPropertiesProcessor.java
+++ b/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/CleanAdditionalPropertiesProcessor.java
@@ -19,14 +19,15 @@ under the License.
 
 package org.apache.streams.jackson;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProcessor;
+
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
 import com.google.common.collect.Lists;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -36,53 +37,58 @@ import java.util.Map;
 
 /**
  * This processor walks an input objectnode and corrects any artifacts
- * that may have occured from improper serialization of jsonschema2pojo beans.
+ * that may have occurred from improper serialization of jackson beans.
  *
+ * <p/>
  * The logic is also available for inclusion in other module via static import.
  */
 public class CleanAdditionalPropertiesProcessor implements StreamsProcessor {
 
-    public static final String STREAMS_ID = "CleanAdditionalPropertiesProcessor";
+  public static final String STREAMS_ID = "CleanAdditionalPropertiesProcessor";
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(CleanAdditionalPropertiesProcessor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(CleanAdditionalPropertiesProcessor.class);
 
-    private ObjectMapper mapper;
+  private ObjectMapper mapper;
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum datum) {
-        List<StreamsDatum> result = Lists.newLinkedList();
-        ObjectNode activity = this.mapper.convertValue(datum.getDocument(), ObjectNode.class);
-        cleanAdditionalProperties(activity);
-        datum.setDocument(activity);
-        result.add(datum);
-        return result;
-    }
+  @Override
+  public List<StreamsDatum> process(StreamsDatum datum) {
+    List<StreamsDatum> result = Lists.newLinkedList();
+    ObjectNode activity = this.mapper.convertValue(datum.getDocument(), ObjectNode.class);
+    cleanAdditionalProperties(activity);
+    datum.setDocument(activity);
+    result.add(datum);
+    return result;
+  }
 
-    @Override
-    public void prepare(Object o) {
-        this.mapper = StreamsJacksonMapper.getInstance();
-        this.mapper.registerModule(new JsonOrgModule());
-    }
+  @Override
+  public void prepare(Object configurationObject) {
+    this.mapper = StreamsJacksonMapper.getInstance();
+    this.mapper.registerModule(new JsonOrgModule());
+  }
 
-    @Override
-    public void cleanUp() {
+  @Override
+  public void cleanUp() {
 
-    }
+  }
 
-    public static void cleanAdditionalProperties(ObjectNode node) {
-        if( node.get("additionalProperties") != null ) {
-            ObjectNode additionalProperties = (ObjectNode) node.get("additionalProperties");
-            cleanAdditionalProperties(additionalProperties);
-            Iterator<Map.Entry<String, JsonNode>> jsonNodeIterator = additionalProperties.fields();
-            while( jsonNodeIterator.hasNext() ) {
-                Map.Entry<String, JsonNode> entry = jsonNodeIterator.next();
-                node.put(entry.getKey(), entry.getValue());
-            }
-        }
+  /**
+   * Recursively removes all additionalProperties maps.
+   * @param node ObjectNode
+   */
+  public static void cleanAdditionalProperties(ObjectNode node) {
+    if ( node.get("additionalProperties") != null ) {
+      ObjectNode additionalProperties = (ObjectNode) node.get("additionalProperties");
+      cleanAdditionalProperties(additionalProperties);
+      Iterator<Map.Entry<String, JsonNode>> jsonNodeIterator = additionalProperties.fields();
+      while ( jsonNodeIterator.hasNext() ) {
+        Map.Entry<String, JsonNode> entry = jsonNodeIterator.next();
+        node.put(entry.getKey(), entry.getValue());
+      }
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/JsonUtil.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/JsonUtil.java b/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/JsonUtil.java
new file mode 100644
index 0000000..ac4ff08
--- /dev/null
+++ b/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/JsonUtil.java
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.streams.jackson;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Lists;
+
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * JSON utilities.
+ */
+public class JsonUtil {
+
+  private JsonUtil() {}
+
+  private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static JsonFactory factory = mapper.getFactory();
+
+  public static JsonNode jsonToJsonNode(String json) {
+    JsonNode node;
+    try {
+      JsonParser jp = factory.createJsonParser(json);
+      node = mapper.readTree(jp);
+    } catch (IOException e) {
+      throw new RuntimeException("IO exception while reading JSON", e);
+    }
+    return node;
+  }
+
+  public static String jsonNodeToJson(JsonNode node) {
+    try {
+      return mapper.writeValueAsString(node);
+    } catch (JsonProcessingException e) {
+      throw new RuntimeException("IO exception while writing JSON", e);
+    }
+  }
+
+  public static <T> T jsonToObject(String json, Class<T> clazz) {
+    try {
+      return mapper.readValue(json, clazz);
+    } catch (IOException e) {
+      throw new RuntimeException("Could not map to object");
+    }
+  }
+
+  public static <T> T jsonNodeToObject(JsonNode node, Class<T> clazz) {
+    return mapper.convertValue(node, clazz);
+  }
+
+  public static <T> JsonNode objectToJsonNode(T obj) {
+    return mapper.valueToTree(obj);
+  }
+
+  public static <T> List<T> jsoNodeToList(JsonNode node, Class<T> clazz) {
+    return mapper.convertValue(node, new TypeReference<List<T>>() {});
+  }
+
+  public static <T> String objectToJson(T object) {
+    try {
+      return mapper.writeValueAsString(object);
+    } catch (IOException e) {
+      throw new RuntimeException("Could not map to object");
+    }
+  }
+
+  public static <T> T getObjFromFile(String filePath, Class<T> clazz) {
+    return jsonNodeToObject(getFromFile(filePath), clazz);
+  }
+
+  public static JsonNode getFromFile(String filePath) {
+    JsonFactory factory = mapper.getFactory(); // since 2.1 use mapper.getFactory() instead
+
+    JsonNode node = null;
+    try {
+      InputStream stream = getStreamForLocation(filePath);
+      JsonParser jp = factory.createParser(stream);
+      node = mapper.readTree(jp);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    return node;
+  }
+
+  private static InputStream getStreamForLocation(String filePath) throws FileNotFoundException {
+    InputStream stream = null;
+    if(filePath.startsWith("file:///")) {
+      stream = new FileInputStream(filePath.replace("file:///", ""));
+    } else if(filePath.startsWith("file:") || filePath.startsWith("/")) {
+      stream = new FileInputStream(filePath.replace("file:", ""));
+    } else {
+      //Assume classpath
+      stream = JsonUtil.class.getClassLoader().getResourceAsStream(filePath.replace("classpath:", ""));
+    }
+
+    return stream;
+  }
+
+  /**
+   * Creates an empty array if missing
+   * @param node object to create the array within
+   * @param field location to create the array
+   * @return the Map representing the extensions property
+   */
+  public static ArrayNode ensureArray(ObjectNode node, String field) {
+    String[] path = Lists.newArrayList(Splitter.on('.').split(field)).toArray(new String[0]);
+    ObjectNode current = node;
+    ArrayNode result = null;
+    for( int i = 0; i < path.length; i++) {
+      current = ensureObject((ObjectNode) node.get(path[i]), path[i]);
+    }
+    if (current.get(field) == null)
+      current.put(field, mapper.createArrayNode());
+    result = (ArrayNode) node.get(field);
+    return result;
+  }
+
+  /**
+   * Creates an empty array if missing
+   * @param node objectnode to create the object within
+   * @param field location to create the object
+   * @return the Map representing the extensions property
+   */
+  public static ObjectNode ensureObject(ObjectNode node, String field) {
+    String[] path = Lists.newArrayList(Splitter.on('.').split(field)).toArray(new String[0]);
+    ObjectNode current = node;
+    ObjectNode result = null;
+    for( int i = 0; i < path.length; i++) {
+      if (node.get(field) == null)
+        node.put(field, mapper.createObjectNode());
+      current = (ObjectNode) node.get(field);
+    }
+    result = ensureObject((ObjectNode) node.get(path[path.length]), Joiner.on('.').join(Arrays.copyOfRange(path, 1, path.length)));
+    return result;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/TypeConverterProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/TypeConverterProcessor.java b/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/TypeConverterProcessor.java
index 454f99e..4736ee2 100644
--- a/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/TypeConverterProcessor.java
+++ b/streams-contrib/streams-processor-jackson/src/main/java/org/apache/streams/jackson/TypeConverterProcessor.java
@@ -19,102 +19,123 @@ under the License.
 
 package org.apache.streams.jackson;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProcessor;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.collect.Lists;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProcessor;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
-import java.io.Serializable;
 import java.util.List;
 
 /**
- *
+ * TypeConverterProcessor changes the JVM type while maintaining
+ * the underlying document.
  */
 public class TypeConverterProcessor implements StreamsProcessor {
 
-    public static final String STREAMS_ID = "TypeConverterProcessor";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(TypeConverterProcessor.class);
-
-    private List<String> formats = Lists.newArrayList();
-
-    private ObjectMapper mapper;
-
-    private Class inClass;
-    private Class outClass;
-
-    public TypeConverterProcessor(Class inClass, Class outClass, ObjectMapper mapper) {
-        this.inClass = inClass;
-        this.outClass = outClass;
-        this.mapper = mapper;
+  public static final String STREAMS_ID = "TypeConverterProcessor";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(TypeConverterProcessor.class);
+
+  private List<String> formats = Lists.newArrayList();
+
+  private ObjectMapper mapper;
+
+  private Class inClass;
+  private Class outClass;
+
+
+  /**
+   * TypeConverterProcessor constructor.
+   * @param inClass JVM type of the incoming document
+   * @param outClass JVM type the document will be converted to
+   * @param mapper ObjectMapper used to perform the conversion
+   */
+  public TypeConverterProcessor(Class inClass, Class outClass, ObjectMapper mapper) {
+    this.inClass = inClass;
+    this.outClass = outClass;
+    this.mapper = mapper;
+  }
+
+  /**
+   * TypeConverterProcessor constructor.
+   * @param inClass JVM type of the incoming document
+   * @param outClass JVM type the document will be converted to
+   * @param formats date-time formats the mapper should recognize
+   */
+  public TypeConverterProcessor(Class inClass, Class outClass, List<String> formats) {
+    this.inClass = inClass;
+    this.outClass = outClass;
+    this.formats = formats;
+  }
+
+  /**
+   * TypeConverterProcessor constructor; a default mapper is created in prepare().
+   * @param inClass JVM type of the incoming document
+   * @param outClass JVM type the document will be converted to
+   */
+  public TypeConverterProcessor(Class inClass, Class outClass) {
+    this.inClass = inClass;
+    this.outClass = outClass;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    List<StreamsDatum> result = Lists.newLinkedList();
+    Object inDoc = entry.getDocument();
+    ObjectNode node = null;
+    if ( inClass == String.class
+          || inDoc instanceof String ) {
+      try {
+        node = this.mapper.readValue((String)entry.getDocument(), ObjectNode.class);
+      } catch (IOException ex) {
+        ex.printStackTrace();
+      }
+    } else {
+      node = this.mapper.convertValue(inDoc, ObjectNode.class);
     }
 
-    public TypeConverterProcessor(Class inClass, Class outClass, List<String> formats) {
-        this.inClass = inClass;
-        this.outClass = outClass;
-        this.formats = formats;
-    }
-
-    public TypeConverterProcessor(Class inClass, Class outClass) {
-        this.inClass = inClass;
-        this.outClass = outClass;
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        List<StreamsDatum> result = Lists.newLinkedList();
-        Object inDoc = entry.getDocument();
-        ObjectNode node = null;
-        if( inClass == String.class ||
-            inDoc instanceof String ) {
-            try {
-                node = this.mapper.readValue((String)entry.getDocument(), ObjectNode.class);
-            } catch (IOException e) {
-                e.printStackTrace();
-            }
+    if (node != null) {
+      Object outDoc;
+      try {
+        if ( outClass == String.class ) {
+          outDoc = this.mapper.writeValueAsString(node);
         } else {
-            node = this.mapper.convertValue(inDoc, ObjectNode.class);
+          outDoc = this.mapper.convertValue(node, outClass);
         }
-
-        if(node != null) {
-            Object outDoc;
-            try {
-                if( outClass == String.class )
-                    outDoc = this.mapper.writeValueAsString(node);
-                else
-                    outDoc = this.mapper.convertValue(node, outClass);
-
-                StreamsDatum outDatum = new StreamsDatum(outDoc, entry.getId(), entry.getTimestamp(), entry.getSequenceid());
-                outDatum.setMetadata(entry.getMetadata());
-                result.add(outDatum);
-            } catch (Throwable e) {
-                LOGGER.warn(e.getMessage());
-                LOGGER.warn(node.toString());
-            }
-        }
-
-        return result;
+        StreamsDatum outDatum = new StreamsDatum(outDoc, entry.getId(), entry.getTimestamp(), entry.getSequenceid());
+        outDatum.setMetadata(entry.getMetadata());
+        result.add(outDatum);
+      } catch (Throwable ex) {
+        LOGGER.warn(ex.getMessage());
+        LOGGER.warn(node.toString());
+      }
     }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        if( formats.size() > 0 )
-            this.mapper = StreamsJacksonMapper.getInstance(formats);
-        else
-            this.mapper = StreamsJacksonMapper.getInstance();
+    return result;
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    if ( formats.size() > 0 ) {
+      this.mapper = StreamsJacksonMapper.getInstance(formats);
+    } else {
+      this.mapper = StreamsJacksonMapper.getInstance();
     }
+  }
 
-    @Override
-    public void cleanUp() {
+  @Override
+  public void cleanUp() {
 
-    }
-};
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-jackson/src/test/java/org/apache/streams/jackson/test/TypeConverterProcessorTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-jackson/src/test/java/org/apache/streams/jackson/test/TypeConverterProcessorTest.java b/streams-contrib/streams-processor-jackson/src/test/java/org/apache/streams/jackson/test/TypeConverterProcessorTest.java
index 1316d5c..e0759c3 100644
--- a/streams-contrib/streams-processor-jackson/src/test/java/org/apache/streams/jackson/test/TypeConverterProcessorTest.java
+++ b/streams-contrib/streams-processor-jackson/src/test/java/org/apache/streams/jackson/test/TypeConverterProcessorTest.java
@@ -18,78 +18,80 @@
 
 package org.apache.streams.jackson.test;
 
-import com.fasterxml.jackson.core.JsonParseException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.jackson.TypeConverterProcessor;
-import org.apache.streams.pojo.json.Activity;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.collect.Lists;
+
 import org.junit.Test;
 
 import java.io.IOException;
 import java.util.List;
 
-import static junit.framework.Assert.*;
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNotNull;
+import static junit.framework.Assert.assertTrue;
 
 /**
  *
  */
 public class TypeConverterProcessorTest {
 
-    private static final String DATASIFT_JSON = "{\"demographic\":{\"gender\":\"female\"},\"interaction\":{\"schema\":{\"version\":3},\"source\":\"Twitter for Android\",\"author\":{\"username\":\"ViiOLeee\",\"name\":\"Violeta Anguita\",\"id\":70931384,\"avatar\":\"http://pbs.twimg.com/profile_images/378800000851401229/bbf480cde2e9923a1d20acd393da0212_normal.jpeg\",\"link\":\"http://twitter.com/ViiOLeee\",\"language\":\"en\"},\"type\":\"twitter\",\"created_at\":\"Tue, 27 May 2014 22:38:15 +0000\",\"received_at\":1.401230295658E9,\"content\":\"RT @AliiAnguita: \\\"@Pharrell: Loved working with @edsheeran on Sing. He's a genius. https://t.co/wB2qKyJMRw\\\" @ViiOLeee  look at this!\",\"id\":\"1e3e5ef97532a580e0741841f5746728\",\"link\":\"http://twitter.com/ViiOLeee/status/471420141989666817\",\"mentions\":[\"Pharrell\",\"edsheeran\",\"ViiOLeee\",\"AliiAnguita\"],\"mention_ids\":[338084918,85452649,70931384]},\"klout\":{\"score\":34},\"language\":{\"tag\":\"en\",\"tag_extended\":\"en\",\
 "confidence\":98},\"links\":{\"code\":[200],\"created_at\":[\"Tue, 27 May 2014 14:28:06 +0000\"],\"meta\":{\"charset\":[\"UTF-8\"],\"content_type\":[\"text/html\"],\"description\":[\"Official Video for Ed Sheeran&#39;s track SING Get this track on iTunes: http://smarturl.it/EdSing Pre-order &#39;x&#39; on iTunes and get &#39;One&#39; instantly: http://smartu...\"],\"keywords\":[[\"ed sheeran\",\"ed sheeran sing\",\"ed sheeran new album\",\"Ed Sheeran (Musical Artist)\",\"ed sheeran one\",\"ed sheeran fault in our stars\",\"ed sheeran all of the stars\",\"s...\"]],\"lang\":[\"en\"],\"opengraph\":[{\"site_name\":\"YouTube\",\"url\":\"http://www.youtube.com/watch?v=tlYcUqEPN58\",\"title\":\"Ed Sheeran - SING [Official Video]\",\"image\":\"https://i1.ytimg.com/vi/tlYcUqEPN58/maxresdefault.jpg\",\"description\":\"Official Video for Ed Sheeran&#39;s track SING Get this track on iTunes: http://smarturl.it/EdSing Pre-order &#39;x&#39; on iTunes and get &#39;One&#39; instantly: http://smartu
 ...\",\"type\":\"video\"}],\"twitter\":[{\"card\":\"player\",\"site\":\"@youtube\",\"url\":\"http://www.youtube.com/watch?v=tlYcUqEPN58\",\"title\":\"Ed Sheeran - SING [Official Video]\",\"description\":\"Official Video for Ed Sheeran&#39;s track SING Get this track on iTunes: http://smarturl.it/EdSing Pre-order &#39;x&#39; on iTunes and get &#39;One&#39; instantly: http://smartu...\",\"image\":\"https://i1.ytimg.com/vi/tlYcUqEPN58/maxresdefault.jpg\",\"app\":{\"iphone\":{\"name\":\"YouTube\",\"id\":\"544007664\",\"url\":\"vnd.youtube://watch/tlYcUqEPN58\"},\"ipad\":{\"name\":\"YouTube\",\"id\":\"544007664\",\"url\":\"vnd.youtube://watch/tlYcUqEPN58\"},\"googleplay\":{\"name\":\"YouTube\",\"id\":\"com.google.android.youtube\",\"url\":\"http://www.youtube.com/watch?v=tlYcUqEPN58\"}},\"player\":\"https://www.youtube.com/embed/tlYcUqEPN58\",\"player_width\":\"1280\",\"player_height\":\"720\"}]},\"normalized_url\":[\"https://youtube.com/watch?v=tlYcUqEPN58\"],\"retweet_count\":[0],\"tit
 le\":[\"Ed Sheeran - SING [Official Video] - YouTube\"],\"url\":[\"https://www.youtube.com/watch?v=tlYcUqEPN58\"]},\"twitter\":{\"id\":\"471420141989666817\",\"retweet\":{\"text\":\"\\\"@Pharrell: Loved working with @edsheeran on Sing. He's a genius. https://t.co/wB2qKyJMRw\\\" @ViiOLeee  look at this!\",\"id\":\"471420141989666817\",\"user\":{\"name\":\"Violeta Anguita\",\"description\":\"La vida no seria la fiesta que todos esperamos, pero mientras estemos aqui debemos BAILAR!!! #ErasmusOnceErasmusForever\",\"location\":\"Espanhaa..Olaa!\",\"statuses_count\":5882,\"followers_count\":249,\"friends_count\":1090,\"screen_name\":\"ViiOLeee\",\"profile_image_url\":\"http://pbs.twimg.com/profile_images/378800000851401229/bbf480cde2e9923a1d20acd393da0212_normal.jpeg\",\"profile_image_url_https\":\"https://pbs.twimg.com/profile_images/378800000851401229/bbf480cde2e9923a1d20acd393da0212_normal.jpeg\",\"lang\":\"en\",\"time_zone\":\"Madrid\",\"utc_offset\":7200,\"listed_count\":1,\"id\":709
 31384,\"id_str\":\"70931384\",\"geo_enabled\":false,\"verified\":false,\"favourites_count\":275,\"created_at\":\"Wed, 02 Sep 2009 10:19:59 +0000\"},\"source\":\"<a href=\\\"http://twitter.com/download/android\\\" rel=\\\"nofollow\\\">Twitter for Android</a>\",\"count\":1,\"created_at\":\"Tue, 27 May 2014 22:38:15 +0000\",\"mentions\":[\"Pharrell\",\"edsheeran\",\"ViiOLeee\",\"AliiAnguita\"],\"mention_ids\":[338084918,85452649,70931384],\"links\":[\"https://www.youtube.com/watch?v=tlYcUqEPN58\"],\"display_urls\":[\"youtube.com/watch?v=tlYcUq\ufffd\ufffd\ufffd\"],\"domains\":[\"www.youtube.com\"],\"lang\":\"en\"},\"retweeted\":{\"id\":\"471419867078209536\",\"user\":{\"name\":\"Alicia Anguita \",\"description\":\"Estudiante de Ingenieria de la Edificaci\ufffd\ufffdn en Granada.\",\"statuses_count\":371,\"followers_count\":185,\"friends_count\":404,\"screen_name\":\"AliiAnguita\",\"profile_image_url\":\"http://pbs.twimg.com/profile_images/424248659677442048/qCPZL8c9_normal.jpeg\",\"profile_image_url_
 https\":\"https://pbs.twimg.com/profile_images/424248659677442048/qCPZL8c9_normal.jpeg\",\"lang\":\"es\",\"listed_count\":0,\"id\":561201891,\"id_str\":\"561201891\",\"geo_enabled\":false,\"verified\":false,\"favourites_count\":17,\"created_at\":\"Mon, 23 Apr 2012 13:11:44 +0000\"},\"source\":\"<a href=\\\"http://twitter.com/download/android\\\" rel=\\\"nofollow\\\">Twitter for Android</a>\",\"created_at\":\"Tue, 27 May 2014 22:37:09 +0000\"}}}";
+  private static final String DATASIFT_JSON = "{\"demographic\":{\"gender\":\"female\"},\"interaction\":{\"schema\":{\"version\":3},\"source\":\"Twitter for Android\",\"author\":{\"username\":\"ViiOLeee\",\"name\":\"Violeta Anguita\",\"id\":70931384,\"avatar\":\"http://pbs.twimg.com/profile_images/378800000851401229/bbf480cde2e9923a1d20acd393da0212_normal.jpeg\",\"link\":\"http://twitter.com/ViiOLeee\",\"language\":\"en\"},\"type\":\"twitter\",\"created_at\":\"Tue, 27 May 2014 22:38:15 +0000\",\"received_at\":1.401230295658E9,\"content\":\"RT @AliiAnguita: \\\"@Pharrell: Loved working with @edsheeran on Sing. He's a genius. https://t.co/wB2qKyJMRw\\\" @ViiOLeee  look at this!\",\"id\":\"1e3e5ef97532a580e0741841f5746728\",\"link\":\"http://twitter.com/ViiOLeee/status/471420141989666817\",\"mentions\":[\"Pharrell\",\"edsheeran\",\"ViiOLeee\",\"AliiAnguita\"],\"mention_ids\":[338084918,85452649,70931384]},\"klout\":{\"score\":34},\"language\":{\"tag\":\"en\",\"tag_extended\":\"en\",\"c
 onfidence\":98},\"links\":{\"code\":[200],\"created_at\":[\"Tue, 27 May 2014 14:28:06 +0000\"],\"meta\":{\"charset\":[\"UTF-8\"],\"content_type\":[\"text/html\"],\"description\":[\"Official Video for Ed Sheeran&#39;s track SING Get this track on iTunes: http://smarturl.it/EdSing Pre-order &#39;x&#39; on iTunes and get &#39;One&#39; instantly: http://smartu...\"],\"keywords\":[[\"ed sheeran\",\"ed sheeran sing\",\"ed sheeran new album\",\"Ed Sheeran (Musical Artist)\",\"ed sheeran one\",\"ed sheeran fault in our stars\",\"ed sheeran all of the stars\",\"s...\"]],\"lang\":[\"en\"],\"opengraph\":[{\"site_name\":\"YouTube\",\"url\":\"http://www.youtube.com/watch?v=tlYcUqEPN58\",\"title\":\"Ed Sheeran - SING [Official Video]\",\"image\":\"https://i1.ytimg.com/vi/tlYcUqEPN58/maxresdefault.jpg\",\"description\":\"Official Video for Ed Sheeran&#39;s track SING Get this track on iTunes: http://smarturl.it/EdSing Pre-order &#39;x&#39; on iTunes and get &#39;One&#39; instantly: http://smartu..
 .\",\"type\":\"video\"}],\"twitter\":[{\"card\":\"player\",\"site\":\"@youtube\",\"url\":\"http://www.youtube.com/watch?v=tlYcUqEPN58\",\"title\":\"Ed Sheeran - SING [Official Video]\",\"description\":\"Official Video for Ed Sheeran&#39;s track SING Get this track on iTunes: http://smarturl.it/EdSing Pre-order &#39;x&#39; on iTunes and get &#39;One&#39; instantly: http://smartu...\",\"image\":\"https://i1.ytimg.com/vi/tlYcUqEPN58/maxresdefault.jpg\",\"app\":{\"iphone\":{\"name\":\"YouTube\",\"id\":\"544007664\",\"url\":\"vnd.youtube://watch/tlYcUqEPN58\"},\"ipad\":{\"name\":\"YouTube\",\"id\":\"544007664\",\"url\":\"vnd.youtube://watch/tlYcUqEPN58\"},\"googleplay\":{\"name\":\"YouTube\",\"id\":\"com.google.android.youtube\",\"url\":\"http://www.youtube.com/watch?v=tlYcUqEPN58\"}},\"player\":\"https://www.youtube.com/embed/tlYcUqEPN58\",\"player_width\":\"1280\",\"player_height\":\"720\"}]},\"normalized_url\":[\"https://youtube.com/watch?v=tlYcUqEPN58\"],\"retweet_count\":[0],\"title
 \":[\"Ed Sheeran - SING [Official Video] - YouTube\"],\"url\":[\"https://www.youtube.com/watch?v=tlYcUqEPN58\"]},\"twitter\":{\"id\":\"471420141989666817\",\"retweet\":{\"text\":\"\\\"@Pharrell: Loved working with @edsheeran on Sing. He's a genius. https://t.co/wB2qKyJMRw\\\" @ViiOLeee  look at this!\",\"id\":\"471420141989666817\",\"user\":{\"name\":\"Violeta Anguita\",\"description\":\"La vida no seria la fiesta que todos esperamos, pero mientras estemos aqui debemos BAILAR!!! #ErasmusOnceErasmusForever\",\"location\":\"Espanhaa..Olaa!\",\"statuses_count\":5882,\"followers_count\":249,\"friends_count\":1090,\"screen_name\":\"ViiOLeee\",\"profile_image_url\":\"http://pbs.twimg.com/profile_images/378800000851401229/bbf480cde2e9923a1d20acd393da0212_normal.jpeg\",\"profile_image_url_https\":\"https://pbs.twimg.com/profile_images/378800000851401229/bbf480cde2e9923a1d20acd393da0212_normal.jpeg\",\"lang\":\"en\",\"time_zone\":\"Madrid\",\"utc_offset\":7200,\"listed_count\":1,\"id\":70931
 384,\"id_str\":\"70931384\",\"geo_enabled\":false,\"verified\":false,\"favourites_count\":275,\"created_at\":\"Wed, 02 Sep 2009 10:19:59 +0000\"},\"source\":\"<a href=\\\"http://twitter.com/download/android\\\" rel=\\\"nofollow\\\">Twitter for Android</a>\",\"count\":1,\"created_at\":\"Tue, 27 May 2014 22:38:15 +0000\",\"mentions\":[\"Pharrell\",\"edsheeran\",\"ViiOLeee\",\"AliiAnguita\"],\"mention_ids\":[338084918,85452649,70931384],\"links\":[\"https://www.youtube.com/watch?v=tlYcUqEPN58\"],\"display_urls\":[\"youtube.com/watch?v=tlYcUq\ufffd\ufffd\ufffd\"],\"domains\":[\"www.youtube.com\"],\"lang\":\"en\"},\"retweeted\":{\"id\":\"471419867078209536\",\"user\":{\"name\":\"Alicia Anguita \",\"description\":\"Estudiante de Ingenieria de la Edificaci\ufffd\ufffdn en Granada.\",\"statuses_count\":371,\"followers_count\":185,\"friends_count\":404,\"screen_name\":\"AliiAnguita\",\"profile_image_url\":\"http://pbs.twimg.com/profile_images/424248659677442048/qCPZL8c9_normal.jpeg\",\"profile_image_url_ht
 tps\":\"https://pbs.twimg.com/profile_images/424248659677442048/qCPZL8c9_normal.jpeg\",\"lang\":\"es\",\"listed_count\":0,\"id\":561201891,\"id_str\":\"561201891\",\"geo_enabled\":false,\"verified\":false,\"favourites_count\":17,\"created_at\":\"Mon, 23 Apr 2012 13:11:44 +0000\"},\"source\":\"<a href=\\\"http://twitter.com/download/android\\\" rel=\\\"nofollow\\\">Twitter for Android</a>\",\"created_at\":\"Tue, 27 May 2014 22:37:09 +0000\"}}}";
 
-    public static final String DATASIFT_FORMAT = "EEE, dd MMM yyyy HH:mm:ss Z";
+  public static final String DATASIFT_FORMAT = "EEE, dd MMM yyyy HH:mm:ss Z";
 
-    @Test
-    public void testTypeConverterStringToString() {
-        final String ID = "1";
-        StreamsProcessor processor = new TypeConverterProcessor(String.class, String.class, Lists.newArrayList(DATASIFT_FORMAT));
-        processor.prepare(null);
-        StreamsDatum datum = new StreamsDatum(DATASIFT_JSON, ID);
-        List<StreamsDatum> result = processor.process(datum);
-        assertNotNull(result);
-        assertEquals(1, result.size());
-        StreamsDatum resultDatum = result.get(0);
-        assertNotNull(resultDatum);
-        assertNotNull(resultDatum.getDocument());
-        assertTrue(resultDatum.getDocument() instanceof String);
-        assertEquals(ID, resultDatum.getId());
-    }
+  @Test
+  public void testTypeConverterStringToString() {
+    final String ID = "1";
+    StreamsProcessor processor = new TypeConverterProcessor(String.class, String.class, Lists.newArrayList(DATASIFT_FORMAT));
+    processor.prepare(null);
+    StreamsDatum datum = new StreamsDatum(DATASIFT_JSON, ID);
+    List<StreamsDatum> result = processor.process(datum);
+    assertNotNull(result);
+    assertEquals(1, result.size());
+    StreamsDatum resultDatum = result.get(0);
+    assertNotNull(resultDatum);
+    assertNotNull(resultDatum.getDocument());
+    assertTrue(resultDatum.getDocument() instanceof String);
+    assertEquals(ID, resultDatum.getId());
+  }
 
-    @Test
-    public void testTypeConverterStringToObjectNode() {
-        final String ID = "1";
-        StreamsProcessor processor = new TypeConverterProcessor(String.class, ObjectNode.class, Lists.newArrayList(DATASIFT_FORMAT));
-        processor.prepare(null);
-        StreamsDatum datum = new StreamsDatum(DATASIFT_JSON, ID);
-        List<StreamsDatum> result = processor.process(datum);
-        assertNotNull(result);
-        assertEquals(1, result.size());
-        StreamsDatum resultDatum = result.get(0);
-        assertNotNull(resultDatum);
-        assertNotNull(resultDatum.getDocument());
-        assertTrue(resultDatum.getDocument() instanceof ObjectNode);
-        assertEquals(ID, resultDatum.getId());
-    }
+  @Test
+  public void testTypeConverterStringToObjectNode() {
+    final String ID = "1";
+    StreamsProcessor processor = new TypeConverterProcessor(String.class, ObjectNode.class, Lists.newArrayList(DATASIFT_FORMAT));
+    processor.prepare(null);
+    StreamsDatum datum = new StreamsDatum(DATASIFT_JSON, ID);
+    List<StreamsDatum> result = processor.process(datum);
+    assertNotNull(result);
+    assertEquals(1, result.size());
+    StreamsDatum resultDatum = result.get(0);
+    assertNotNull(resultDatum);
+    assertNotNull(resultDatum.getDocument());
+    assertTrue(resultDatum.getDocument() instanceof ObjectNode);
+    assertEquals(ID, resultDatum.getId());
+  }
 
-    @Test
-    public void testTypeConverterObjectNodeToString() throws IOException {
-        final String ID = "1";
-        StreamsProcessor processor = new TypeConverterProcessor(ObjectNode.class, String.class, Lists.newArrayList(DATASIFT_FORMAT));
-        processor.prepare(null);
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(DATASIFT_FORMAT));
-        ObjectNode node = mapper.readValue(DATASIFT_JSON, ObjectNode.class);
-        StreamsDatum datum = new StreamsDatum(node, ID);
-        List<StreamsDatum> result = processor.process(datum);
-        assertNotNull(result);
-        assertEquals(1, result.size());
-        StreamsDatum resultDatum = result.get(0);
-        assertNotNull(resultDatum);
-        assertNotNull(resultDatum.getDocument());
-        assertTrue(resultDatum.getDocument() instanceof String);
-        assertEquals(ID, resultDatum.getId());
-    }
+  @Test
+  public void testTypeConverterObjectNodeToString() throws IOException {
+    final String ID = "1";
+    StreamsProcessor processor = new TypeConverterProcessor(ObjectNode.class, String.class, Lists.newArrayList(DATASIFT_FORMAT));
+    processor.prepare(null);
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance(Lists.newArrayList(DATASIFT_FORMAT));
+    ObjectNode node = mapper.readValue(DATASIFT_JSON, ObjectNode.class);
+    StreamsDatum datum = new StreamsDatum(node, ID);
+    List<StreamsDatum> result = processor.process(datum);
+    assertNotNull(result);
+    assertEquals(1, result.size());
+    StreamsDatum resultDatum = result.get(0);
+    assertNotNull(resultDatum);
+    assertNotNull(resultDatum.getDocument());
+    assertTrue(resultDatum.getDocument() instanceof String);
+    assertEquals(ID, resultDatum.getId());
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-json/src/main/java/org/apache/streams/json/JsonPathExtractor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-json/src/main/java/org/apache/streams/json/JsonPathExtractor.java b/streams-contrib/streams-processor-json/src/main/java/org/apache/streams/json/JsonPathExtractor.java
index 24288f1..c2c3705 100644
--- a/streams-contrib/streams-processor-json/src/main/java/org/apache/streams/json/JsonPathExtractor.java
+++ b/streams-contrib/streams-processor-json/src/main/java/org/apache/streams/json/JsonPathExtractor.java
@@ -18,18 +18,21 @@
 
 package org.apache.streams.json;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProcessor;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
 import com.google.common.collect.Lists;
 import com.jayway.jsonpath.JsonPath;
+
 import net.minidev.json.JSONArray;
 import net.minidev.json.JSONObject;
+
 import org.apache.commons.lang3.StringUtils;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -38,118 +41,118 @@ import java.util.List;
 
 /**
  * Provides a base implementation for extracting json fields and
- * objects from datums using JsonPath syntax
+ * objects from datums using JsonPath syntax.
  */
 public class JsonPathExtractor implements StreamsProcessor {
 
-    private final static String STREAMS_ID = "JsonPathExtractor";
+  private static final String STREAMS_ID = "JsonPathExtractor";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(JsonPathExtractor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(JsonPathExtractor.class);
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    private String pathExpression;
-    private JsonPath jsonPath;
+  private String pathExpression;
+  private JsonPath jsonPath;
 
-    public JsonPathExtractor() {
-        LOGGER.info("creating JsonPathExtractor");
-    }
+  public JsonPathExtractor() {
+    LOGGER.info("creating JsonPathExtractor");
+  }
 
-    public JsonPathExtractor(String pathExpression) {
-        this.pathExpression = pathExpression;
-        LOGGER.info("creating JsonPathExtractor for " + this.pathExpression);
-    }
+  public JsonPathExtractor(String pathExpression) {
+    this.pathExpression = pathExpression;
+    LOGGER.info("creating JsonPathExtractor for " + this.pathExpression);
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
 
-        List<StreamsDatum> result = Lists.newArrayList();
+    List<StreamsDatum> result = Lists.newArrayList();
 
-        String json = null;
+    String json = null;
 
-        LOGGER.debug("{} processing {}", STREAMS_ID);
+    LOGGER.debug("{} processing {}", STREAMS_ID);
 
-        if( entry.getDocument() instanceof ObjectNode ) {
-            ObjectNode node = (ObjectNode) entry.getDocument();
-            try {
-                json = mapper.writeValueAsString(node);
-            } catch (JsonProcessingException e) {
-                LOGGER.warn(e.getMessage());
-            }
-        } else if( entry.getDocument() instanceof String ) {
-            json = (String) entry.getDocument();
-        }
+    if ( entry.getDocument() instanceof ObjectNode ) {
+      ObjectNode node = (ObjectNode) entry.getDocument();
+      try {
+        json = mapper.writeValueAsString(node);
+      } catch (JsonProcessingException ex) {
+        LOGGER.warn(ex.getMessage());
+      }
+    } else if ( entry.getDocument() instanceof String ) {
+      json = (String) entry.getDocument();
+    }
 
-        if( StringUtils.isNotEmpty(json)) {
-
-            try {
-                Object readResult = jsonPath.read(json);
-
-                if (readResult instanceof String) {
-                    String match = (String) readResult;
-                    LOGGER.info("Matched String: " + match);
-                    StreamsDatum matchDatum = new StreamsDatum(match);
-                    result.add(matchDatum);
-                } else if (readResult instanceof JSONObject) {
-                    JSONObject match = (JSONObject) readResult;
-                    LOGGER.info("Matched Object: " + match);
-                    ObjectNode objectNode = mapper.readValue(mapper.writeValueAsString(match), ObjectNode.class);
-                    StreamsDatum matchDatum = new StreamsDatum(objectNode);
-                    result.add(matchDatum);
-                } else if (readResult instanceof JSONArray) {
-                    LOGGER.info("Matched Array:");
-                    JSONArray array = (JSONArray) readResult;
-                    Iterator iterator = array.iterator();
-                    while (iterator.hasNext()) {
-                        Object item = iterator.next();
-                        if( item instanceof String ) {
-                            LOGGER.info("String Item:" + item);
-                            String match = (String) item;
-                            StreamsDatum matchDatum = new StreamsDatum(match);
-                            result.add(matchDatum);
-                        } else if ( item instanceof JSONObject ) {
-                            LOGGER.info("Object Item:" + item);
-                            JSONObject match = (JSONObject) item;
-                            ObjectNode objectNode = mapper.readValue(mapper.writeValueAsString(match), ObjectNode.class);
-                            StreamsDatum matchDatum = new StreamsDatum(objectNode);
-                            result.add(matchDatum);
-                        } else {
-                            LOGGER.info("Other Item:" + item.toString());
-                        }
-                    }
-                } else {
-                    LOGGER.info("Other Match:" + readResult.toString());
-                }
-
-            } catch( Exception e ) {
-                LOGGER.warn(e.getMessage());
+    if ( StringUtils.isNotEmpty(json)) {
+
+      try {
+        Object readResult = jsonPath.read(json);
+
+        if (readResult instanceof String) {
+          String match = (String) readResult;
+          LOGGER.info("Matched String: " + match);
+          StreamsDatum matchDatum = new StreamsDatum(match);
+          result.add(matchDatum);
+        } else if (readResult instanceof JSONObject) {
+          JSONObject match = (JSONObject) readResult;
+          LOGGER.info("Matched Object: " + match);
+          ObjectNode objectNode = mapper.readValue(mapper.writeValueAsString(match), ObjectNode.class);
+          StreamsDatum matchDatum = new StreamsDatum(objectNode);
+          result.add(matchDatum);
+        } else if (readResult instanceof JSONArray) {
+          LOGGER.info("Matched Array:");
+          JSONArray array = (JSONArray) readResult;
+          Iterator iterator = array.iterator();
+          while (iterator.hasNext()) {
+            Object item = iterator.next();
+            if ( item instanceof String ) {
+              LOGGER.info("String Item:" + item);
+              String match = (String) item;
+              StreamsDatum matchDatum = new StreamsDatum(match);
+              result.add(matchDatum);
+            } else if ( item instanceof JSONObject ) {
+              LOGGER.info("Object Item:" + item);
+              JSONObject match = (JSONObject) item;
+              ObjectNode objectNode = mapper.readValue(mapper.writeValueAsString(match), ObjectNode.class);
+              StreamsDatum matchDatum = new StreamsDatum(objectNode);
+              result.add(matchDatum);
+            } else {
+              LOGGER.info("Other Item:" + item.toString());
             }
-
+          }
         } else {
-            LOGGER.warn("result empty");
+          LOGGER.info("Other Match:" + readResult.toString());
         }
 
-        return result;
+      } catch ( Exception ex ) {
+        LOGGER.warn(ex.getMessage());
+      }
 
+    } else {
+      LOGGER.warn("result empty");
     }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        if( configurationObject instanceof String )
-            jsonPath = JsonPath.compile((String)(configurationObject));
-        else if( configurationObject instanceof String[] )
-            jsonPath = JsonPath.compile(((String[])(configurationObject))[0]);
+    return result;
 
-        mapper.registerModule(new JsonOrgModule());
-    }
+  }
 
-    @Override
-    public void cleanUp() {
-        LOGGER.info("shutting down JsonPathExtractor for " + this.pathExpression);
+  @Override
+  public void prepare(Object configurationObject) {
+    if ( configurationObject instanceof String ) {
+      jsonPath = JsonPath.compile((String) (configurationObject));
+    } else if ( configurationObject instanceof String[] ) {
+      jsonPath = JsonPath.compile(((String[]) (configurationObject))[0]);
     }
-};
+    mapper.registerModule(new JsonOrgModule());
+  }
+
+  @Override
+  public void cleanUp() {
+    LOGGER.info("shutting down JsonPathExtractor for " + this.pathExpression);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-json/src/main/java/org/apache/streams/json/JsonPathFilter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-json/src/main/java/org/apache/streams/json/JsonPathFilter.java b/streams-contrib/streams-processor-json/src/main/java/org/apache/streams/json/JsonPathFilter.java
index fcf34d7..ec741c2 100644
--- a/streams-contrib/streams-processor-json/src/main/java/org/apache/streams/json/JsonPathFilter.java
+++ b/streams-contrib/streams-processor-json/src/main/java/org/apache/streams/json/JsonPathFilter.java
@@ -19,6 +19,10 @@
 
 package org.apache.streams.json;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProcessor;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -28,12 +32,11 @@ import com.fasterxml.jackson.datatype.jsonorg.JsonOrgModule;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.jayway.jsonpath.JsonPath;
+
 import net.minidev.json.JSONArray;
 import net.minidev.json.JSONObject;
+
 import org.apache.commons.lang3.StringUtils;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -43,134 +46,133 @@ import java.util.Map;
 
 /**
  * Provides a base implementation for filtering datums which
- * do not contain specific fields using JsonPath syntax
+ * do not contain specific fields using JsonPath syntax.
  */
 public class JsonPathFilter implements StreamsProcessor {
 
-    private final static String STREAMS_ID = "JsonPathFilter";
+  private static final String STREAMS_ID = "JsonPathFilter";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(JsonPathFilter.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(JsonPathFilter.class);
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    private String pathExpression;
-    private JsonPath jsonPath;
-    private String destNodeName;
-
-    public JsonPathFilter() {
-        LOGGER.info("creating JsonPathFilter");
-    }
+  private String pathExpression;
+  private JsonPath jsonPath;
+  private String destNodeName;
 
-    public JsonPathFilter(String pathExpression) {
-        this.pathExpression = pathExpression;
-        LOGGER.info("creating JsonPathFilter for " + this.pathExpression);
-    }
+  public JsonPathFilter() {
+    LOGGER.info("creating JsonPathFilter");
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public JsonPathFilter(String pathExpression) {
+    this.pathExpression = pathExpression;
+    LOGGER.info("creating JsonPathFilter for " + this.pathExpression);
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        List<StreamsDatum> result = Lists.newArrayList();
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
 
-        String json = null;
+    List<StreamsDatum> result = Lists.newArrayList();
 
-        ObjectNode document = null;
+    String json = null;
 
-        LOGGER.debug("{} processing {}", STREAMS_ID);
+    ObjectNode document = null;
 
-        if( entry.getDocument() instanceof ObjectNode ) {
-            document = (ObjectNode) entry.getDocument();
-            try {
-                json = mapper.writeValueAsString(document);
-            } catch (JsonProcessingException e) {
-                e.printStackTrace();
-            }
-        } else if( entry.getDocument() instanceof String ) {
-            json = (String) entry.getDocument();
-            try {
-                document = mapper.readValue(json, ObjectNode.class);
-            } catch (IOException e) {
-                e.printStackTrace();
-                return null;
-            }
-        }
+    LOGGER.debug("{} processing {}", STREAMS_ID);
 
-        Preconditions.checkNotNull(document);
-
-        if( StringUtils.isNotEmpty(json)) {
-
-            Object srcResult = null;
-            try {
-                srcResult = jsonPath.read(json);
-
-            } catch( Exception e ) {
-                e.printStackTrace();
-                LOGGER.warn(e.getMessage());
-            }
-
-            Preconditions.checkNotNull(srcResult);
-
-            String[] path = StringUtils.split(pathExpression, '.');
-            ObjectNode node = document;
-            for (int i = 1; i < path.length-1; i++) {
-                node = (ObjectNode) document.get(path[i]);
-            }
-
-            Preconditions.checkNotNull(node);
-
-            if( srcResult instanceof JSONArray ) {
-                try {
-                    ArrayNode jsonNode = mapper.convertValue(srcResult, ArrayNode.class);
-                    if( jsonNode.size() == 1 ) {
-                        JsonNode item = jsonNode.get(0);
-                        node.set(destNodeName, item);
-                    } else {
-                        node.set(destNodeName, jsonNode);
-                    }
-                } catch (Exception e) {
-                    LOGGER.warn(e.getMessage());
-                }
-            } else if( srcResult instanceof JSONObject ) {
-                try {
-                    ObjectNode jsonNode = mapper.convertValue(srcResult, ObjectNode.class);
-                    node.set(destNodeName, jsonNode);
-                } catch (Exception e) {
-                    LOGGER.warn(e.getMessage());
-                }
-            } else if( srcResult instanceof String ) {
-                try {
-                    node.put(destNodeName, (String) srcResult);
-                } catch (Exception e) {
-                    LOGGER.warn(e.getMessage());
-                }
-            }
+    if ( entry.getDocument() instanceof ObjectNode ) {
+      document = (ObjectNode) entry.getDocument();
+      try {
+        json = mapper.writeValueAsString(document);
+      } catch (JsonProcessingException ex) {
+        ex.printStackTrace();
+      }
+    } else if ( entry.getDocument() instanceof String ) {
+      json = (String) entry.getDocument();
+      try {
+        document = mapper.readValue(json, ObjectNode.class);
+      } catch (IOException ex) {
+        ex.printStackTrace();
+        return null;
+      }
+    }
 
+    Preconditions.checkNotNull(document);
+
+    if ( StringUtils.isNotEmpty(json)) {
+
+      Object srcResult = null;
+      try {
+        srcResult = jsonPath.read(json);
+      } catch ( Exception ex ) {
+        ex.printStackTrace();
+        LOGGER.warn(ex.getMessage());
+      }
+
+      Preconditions.checkNotNull(srcResult);
+
+      String[] path = StringUtils.split(pathExpression, '.');
+      ObjectNode node = document;
+      for (int i = 1; i < path.length - 1; i++) {
+        node = (ObjectNode) document.get(path[i]);
+      }
+
+      Preconditions.checkNotNull(node);
+
+      if ( srcResult instanceof JSONArray ) {
+        try {
+          ArrayNode jsonNode = mapper.convertValue(srcResult, ArrayNode.class);
+          if ( jsonNode.size() == 1 ) {
+            JsonNode item = jsonNode.get(0);
+            node.set(destNodeName, item);
+          } else {
+            node.set(destNodeName, jsonNode);
+          }
+        } catch (Exception ex) {
+          LOGGER.warn(ex.getMessage());
+        }
+      } else if ( srcResult instanceof JSONObject ) {
+        try {
+          ObjectNode jsonNode = mapper.convertValue(srcResult, ObjectNode.class);
+          node.set(destNodeName, jsonNode);
+        } catch (Exception ex) {
+          LOGGER.warn(ex.getMessage());
+        }
+      } else if ( srcResult instanceof String ) {
+        try {
+          node.put(destNodeName, (String) srcResult);
+        } catch (Exception ex) {
+          LOGGER.warn(ex.getMessage());
         }
+      }
 
-        result.add(new StreamsDatum(document));
+    }
 
-        return result;
+    result.add(new StreamsDatum(document));
 
-    }
+    return result;
 
-    @Override
-    public void prepare(Object configurationObject) {
-        if( configurationObject instanceof Map) {
-            Map<String,String> params = ( Map<String,String>) configurationObject;
-            pathExpression = params.get("pathExpression");
-            jsonPath = JsonPath.compile(pathExpression);
-            destNodeName = pathExpression.substring(pathExpression.lastIndexOf(".") + 1);
-        }
+  }
 
-        mapper.registerModule(new JsonOrgModule());
+  @Override
+  public void prepare(Object configurationObject) {
+    if ( configurationObject instanceof Map) {
+      Map<String,String> params = ( Map<String,String>) configurationObject;
+      pathExpression = params.get("pathExpression");
+      jsonPath = JsonPath.compile(pathExpression);
+      destNodeName = pathExpression.substring(pathExpression.lastIndexOf(".") + 1);
     }
 
-    @Override
-    public void cleanUp() {
-        LOGGER.info("shutting down JsonPathFilter for " + this.pathExpression);
-    }
-};
+    mapper.registerModule(new JsonOrgModule());
+  }
+
+  @Override
+  public void cleanUp() {
+    LOGGER.info("shutting down JsonPathFilter for " + this.pathExpression);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-json/src/test/java/org/apache/streams/json/test/JsonPathExtractorTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-json/src/test/java/org/apache/streams/json/test/JsonPathExtractorTest.java b/streams-contrib/streams-processor-json/src/test/java/org/apache/streams/json/test/JsonPathExtractorTest.java
index 2ab3b7f..1ab7c00 100644
--- a/streams-contrib/streams-processor-json/src/test/java/org/apache/streams/json/test/JsonPathExtractorTest.java
+++ b/streams-contrib/streams-processor-json/src/test/java/org/apache/streams/json/test/JsonPathExtractorTest.java
@@ -19,83 +19,85 @@
 
 package org.apache.streams.json.test;
 
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.commons.io.FileUtils;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.json.JsonPathExtractor;
+
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+import org.apache.commons.io.FileUtils;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.*;
+import java.io.File;
+import java.io.IOException;
 import java.util.List;
 
-import static org.hamcrest.CoreMatchers.*;
+import static org.hamcrest.CoreMatchers.is;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 
 /**
  * Test for extracting json fields and
- * objects from datums using JsonPath syntax
+ * objects from datums using JsonPath syntax.
  */
 public class JsonPathExtractorTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(JsonPathExtractorTest.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(JsonPathExtractorTest.class);
 
-    private String testJson;
+  private String testJson;
 
-    @Before
-    public void initialize() {
-        try {
-            testJson = FileUtils.readFileToString(new File("src/test/resources/books.json"));
-        } catch (IOException e) {
-            e.printStackTrace();
-            Assert.fail();
-        }
+  @Before
+  public void initialize() {
+    try {
+      testJson = FileUtils.readFileToString(new File("src/test/resources/books.json"));
+    } catch (IOException ex) {
+      ex.printStackTrace();
+      Assert.fail();
     }
+  }
 
-    @Test
-    public void test1()
-    {
-        JsonPathExtractor extractor = new JsonPathExtractor();
-        extractor.prepare("$.store.book[*].author");
-        List<StreamsDatum> result = extractor.process(new StreamsDatum(testJson));
-        assertThat(result.size(), is(2));
-        assertTrue(result.get(0).getDocument() instanceof String);
-        assertTrue(result.get(1).getDocument() instanceof String);
-    }
+  @Test
+  public void test1()
+  {
+    JsonPathExtractor extractor = new JsonPathExtractor();
+    extractor.prepare("$.store.book[*].author");
+    List<StreamsDatum> result = extractor.process(new StreamsDatum(testJson));
+    assertThat(result.size(), is(2));
+    assertTrue(result.get(0).getDocument() instanceof String);
+    assertTrue(result.get(1).getDocument() instanceof String);
+  }
 
-    @Test
-    public void test2()
-    {
-        JsonPathExtractor extractor = new JsonPathExtractor();
-        extractor.prepare("$.store.book[?(@.category == 'reference')]");
-        List<StreamsDatum> result = extractor.process(new StreamsDatum(testJson));
-        assertThat(result.size(), is(1));
-        assertTrue(result.get(0).getDocument() instanceof ObjectNode);
-    }
+  @Test
+  public void test2()
+  {
+    JsonPathExtractor extractor = new JsonPathExtractor();
+    extractor.prepare("$.store.book[?(@.category == 'reference')]");
+    List<StreamsDatum> result = extractor.process(new StreamsDatum(testJson));
+    assertThat(result.size(), is(1));
+    assertTrue(result.get(0).getDocument() instanceof ObjectNode);
+  }
 
-    @Test
-    public void test3()
-    {
-        JsonPathExtractor extractor = new JsonPathExtractor();
-        extractor.prepare("$.store.book[?(@.price > 10)]");
-        List<StreamsDatum> result = extractor.process(new StreamsDatum(testJson));
-        assertThat(result.size(), is(1));
-        assertTrue(result.get(0).getDocument() instanceof ObjectNode);
-    }
+  @Test
+  public void test3()
+  {
+    JsonPathExtractor extractor = new JsonPathExtractor();
+    extractor.prepare("$.store.book[?(@.price > 10)]");
+    List<StreamsDatum> result = extractor.process(new StreamsDatum(testJson));
+    assertThat(result.size(), is(1));
+    assertTrue(result.get(0).getDocument() instanceof ObjectNode);
+  }
 
-    @Test
-    public void test4()
-    {
-        JsonPathExtractor extractor = new JsonPathExtractor();
-        extractor.prepare("$.store.book[?(@.isbn)]");
-        List<StreamsDatum> result = extractor.process(new StreamsDatum(testJson));
-        assertThat(result.size(), is(1));
-        assertTrue(result.get(0).getDocument() instanceof ObjectNode);
-    }
+  @Test
+  public void test4()
+  {
+    JsonPathExtractor extractor = new JsonPathExtractor();
+    extractor.prepare("$.store.book[?(@.isbn)]");
+    List<StreamsDatum> result = extractor.process(new StreamsDatum(testJson));
+    assertThat(result.size(), is(1));
+    assertTrue(result.get(0).getDocument() instanceof ObjectNode);
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-peoplepattern/src/main/java/org/apache/streams/peoplepattern/AccountTypeProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-peoplepattern/src/main/java/org/apache/streams/peoplepattern/AccountTypeProcessor.java b/streams-contrib/streams-processor-peoplepattern/src/main/java/org/apache/streams/peoplepattern/AccountTypeProcessor.java
index 5b23e16..04a680d 100644
--- a/streams-contrib/streams-processor-peoplepattern/src/main/java/org/apache/streams/peoplepattern/AccountTypeProcessor.java
+++ b/streams-contrib/streams-processor-peoplepattern/src/main/java/org/apache/streams/peoplepattern/AccountTypeProcessor.java
@@ -26,6 +26,7 @@ import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.pojo.extensions.ExtensionUtil;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -33,42 +34,49 @@ import java.util.HashMap;
 import java.util.Map;
 
 /**
- * Enrich actor with account type
+ * Enrich actor with account type.
  */
 public class AccountTypeProcessor extends SimpleHTTPGetProcessor {
 
-    private final static String STREAMS_ID = "AccountTypeProcessor";
+  private static final String STREAMS_ID = "AccountTypeProcessor";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(AccountTypeProcessor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(AccountTypeProcessor.class);
 
-    public AccountTypeProcessor() {
-        this(new ComponentConfigurator<>(HttpProcessorConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("peoplepattern")));
-    }
+  /**
+   * AccountTypeProcessor constructor - resolves HttpProcessorConfiguration from JVM 'peoplepattern'.
+   */
+  public AccountTypeProcessor() {
+    this(new ComponentConfigurator<>(HttpProcessorConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("peoplepattern")));
+  }
 
-    public AccountTypeProcessor(HttpProcessorConfiguration peoplePatternConfiguration) {
-        super(peoplePatternConfiguration);
-        LOGGER.info("creating AccountTypeProcessor");
-        configuration.setProtocol("https");
-        configuration.setHostname("api.peoplepattern.com");
-        configuration.setResourcePath("/v0.2/account_type/");
-        configuration.setEntity(HttpProcessorConfiguration.Entity.ACTOR);
-        configuration.setExtension("account_type");
-    }
+  /**
+   * AccountTypeProcessor constructor - uses supplied HttpProcessorConfiguration.
+   * @param peoplePatternConfiguration peoplePatternConfiguration
+   */
+  public AccountTypeProcessor(HttpProcessorConfiguration peoplePatternConfiguration) {
+    super(peoplePatternConfiguration);
+    LOGGER.info("creating AccountTypeProcessor");
+    configuration.setProtocol("https");
+    configuration.setHostname("api.peoplepattern.com");
+    configuration.setResourcePath("/v0.2/account_type/");
+    configuration.setEntity(HttpProcessorConfiguration.Entity.ACTOR);
+    configuration.setExtension("account_type");
+  }
 
-    /**
-     Override this to add parameters to the request
-     */
-    @Override
-    protected Map<String, String> prepareParams(StreamsDatum entry) {
-        Activity activity = mapper.convertValue(entry.getDocument(), Activity.class);
-        ActivityObject actor = mapper.convertValue(activity.getActor(), ActivityObject.class);
-        String username = (String) ExtensionUtil.getInstance().getExtension(actor, "screenName");
-        Map<String, String> params = new HashMap<>();
-        params.put("id", actor.getId());
-        params.put("name", actor.getDisplayName());
-        params.put("username", username);
-        params.put("description", actor.getSummary());
-        return params;
-    }
+  /**
+   Override this to add parameters to the request.
+   */
+  @Override
+  protected Map<String, String> prepareParams(StreamsDatum entry) {
+    Activity activity = mapper.convertValue(entry.getDocument(), Activity.class);
+    ActivityObject actor = mapper.convertValue(activity.getActor(), ActivityObject.class);
+    String username = (String) ExtensionUtil.getInstance().getExtension(actor, "screenName");
+    Map<String, String> params = new HashMap<>();
+    params.put("id", actor.getId());
+    params.put("name", actor.getDisplayName());
+    params.put("username", username);
+    params.put("description", actor.getSummary());
+    return params;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-peoplepattern/src/main/java/org/apache/streams/peoplepattern/DemographicsProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-peoplepattern/src/main/java/org/apache/streams/peoplepattern/DemographicsProcessor.java b/streams-contrib/streams-processor-peoplepattern/src/main/java/org/apache/streams/peoplepattern/DemographicsProcessor.java
index 1ee55a9..e615026 100644
--- a/streams-contrib/streams-processor-peoplepattern/src/main/java/org/apache/streams/peoplepattern/DemographicsProcessor.java
+++ b/streams-contrib/streams-processor-peoplepattern/src/main/java/org/apache/streams/peoplepattern/DemographicsProcessor.java
@@ -26,6 +26,7 @@ import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.pojo.extensions.ExtensionUtil;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -33,43 +34,50 @@ import java.util.HashMap;
 import java.util.Map;
 
 /**
- * Enrich actor with demographics
+ * Enrich actor with demographics.
  */
 public class DemographicsProcessor extends SimpleHTTPGetProcessor {
 
-    public final static String STREAMS_ID = "DemographicsProcessor";
+  public static final String STREAMS_ID = "DemographicsProcessor";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(DemographicsProcessor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(DemographicsProcessor.class);
 
-    public DemographicsProcessor() {
-        this(new ComponentConfigurator<>(HttpProcessorConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("peoplepattern")));
-    }
+  /**
+   * DemographicsProcessor constructor - resolves HttpProcessorConfiguration from JVM 'peoplepattern'.
+   */
+  public DemographicsProcessor() {
+    this(new ComponentConfigurator<>(HttpProcessorConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("peoplepattern")));
+  }
 
-    public DemographicsProcessor(HttpProcessorConfiguration peoplePatternConfiguration) {
-        super(peoplePatternConfiguration);
-        LOGGER.info("creating DemographicsProcessor");
-        configuration.setProtocol("https");
-        configuration.setHostname("api.peoplepattern.com");
-        configuration.setResourcePath("/v0.2/demographics/");
-        configuration.setEntity(HttpProcessorConfiguration.Entity.ACTOR);
-        configuration.setExtension("demographics");
-    }
+  /**
+   * DemographicsProcessor constructor - uses supplied HttpProcessorConfiguration.
+   * @param peoplePatternConfiguration peoplePatternConfiguration
+   */
+  public DemographicsProcessor(HttpProcessorConfiguration peoplePatternConfiguration) {
+    super(peoplePatternConfiguration);
+    LOGGER.info("creating DemographicsProcessor");
+    configuration.setProtocol("https");
+    configuration.setHostname("api.peoplepattern.com");
+    configuration.setResourcePath("/v0.2/demographics/");
+    configuration.setEntity(HttpProcessorConfiguration.Entity.ACTOR);
+    configuration.setExtension("demographics");
+  }
 
-    /**
-     Override this to add parameters to the request
-     */
-    @Override
-    protected Map<String, String> prepareParams(StreamsDatum entry) {
-        Activity activity = mapper.convertValue(entry.getDocument(), Activity.class);
-        ActivityObject actor = mapper.convertValue(activity.getActor(), ActivityObject.class);
-        String username = (String) ExtensionUtil.getInstance().getExtension(actor, "screenName");
-        Map<String, String> params = new HashMap<>();
-        params.put("id", actor.getId());
-        params.put("name", actor.getDisplayName());
-        params.put("username", username);
-        params.put("description", actor.getSummary());
-        return params;
-    }
+  /**
+   Override this to add parameters to the request.
+   */
+  @Override
+  protected Map<String, String> prepareParams(StreamsDatum entry) {
+    Activity activity = mapper.convertValue(entry.getDocument(), Activity.class);
+    ActivityObject actor = mapper.convertValue(activity.getActor(), ActivityObject.class);
+    String username = (String) ExtensionUtil.getInstance().getExtension(actor, "screenName");
+    Map<String, String> params = new HashMap<>();
+    params.put("id", actor.getId());
+    params.put("name", actor.getDisplayName());
+    params.put("username", username);
+    params.put("description", actor.getSummary());
+    return params;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/AbstractRegexExtensionExtractor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/AbstractRegexExtensionExtractor.java b/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/AbstractRegexExtensionExtractor.java
index 0f46ccd..206931f 100644
--- a/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/AbstractRegexExtensionExtractor.java
+++ b/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/AbstractRegexExtensionExtractor.java
@@ -19,16 +19,18 @@
 
 package org.apache.streams.regex;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.extensions.ExtensionUtil;
 import org.apache.streams.pojo.json.Activity;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -44,91 +46,93 @@ import java.util.Set;
  * modifying the appropriate {@link org.apache.streams.pojo.json.Activity} extensions object.
  */
 public abstract class AbstractRegexExtensionExtractor<T> implements StreamsProcessor {
-    private final String patternConfigKey;
-    private final String extensionKey;
-    private final String defaultPattern;
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(AbstractRegexExtensionExtractor.class);
+  private final String patternConfigKey;
+  private final String extensionKey;
+  private final String defaultPattern;
 
-    private final static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractRegexExtensionExtractor.class);
 
-    private String pattern;
+  private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    protected AbstractRegexExtensionExtractor(String patternConfigKey, String extensionKey, String defaultPattern) {
-        this.patternConfigKey = patternConfigKey;
-        this.extensionKey = extensionKey;
-        this.defaultPattern = defaultPattern;
-    }
+  private String pattern;
 
-    public String getPattern() {
-        return pattern;
-    }
+  protected AbstractRegexExtensionExtractor(String patternConfigKey, String extensionKey, String defaultPattern) {
+    this.patternConfigKey = patternConfigKey;
+    this.extensionKey = extensionKey;
+    this.defaultPattern = defaultPattern;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        Activity activity;
-        if (entry.getDocument() instanceof Activity) {
-            activity = (Activity) entry.getDocument();
-        } else if (entry.getDocument() instanceof ObjectNode) {
-            activity = mapper.convertValue(entry.getDocument(), Activity.class);
-        } else {
-            return new ArrayList<>();
-        }
-        if (Strings.isNullOrEmpty(pattern)) {
-            prepare(null);
-        }
-        Map<String, List<Integer>> matches = RegexUtils.extractMatches(pattern, activity.getContent());
-        Collection<T> entities = ensureTargetObject(activity);
-        for (String key : matches.keySet()) {
-            entities.add(prepareObject(key));
-        }
-
-        Set<T> set = new HashSet<>();
-        set.addAll(entities);
-        entities.clear();
-        entities.addAll(set);
-
-        entry.setDocument(activity);
-        return Lists.newArrayList(entry);
-    }
+  public String getPattern() {
+    return pattern;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        if (configurationObject instanceof Map) {
-            if (((Map) configurationObject).containsKey(patternConfigKey)) {
-                pattern = (String) ((Map) configurationObject).get(patternConfigKey);
-            }
-        } else if (configurationObject instanceof String) {
-            pattern = (String) configurationObject;
-        } else {
-            pattern = defaultPattern;
-        }
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    Activity activity;
+    if (entry.getDocument() instanceof Activity) {
+      activity = (Activity) entry.getDocument();
+    } else if (entry.getDocument() instanceof ObjectNode) {
+      activity = mapper.convertValue(entry.getDocument(), Activity.class);
+    } else {
+      return new ArrayList<>();
     }
-
-    @Override
-    public void cleanUp() {
-        //NOP
+    if (Strings.isNullOrEmpty(pattern)) {
+      prepare(null);
+    }
+    Map<String, List<Integer>> matches = RegexUtils.extractMatches(pattern, activity.getContent());
+    Collection<T> entities = ensureTargetObject(activity);
+    for (String key : matches.keySet()) {
+      entities.add(prepareObject(key));
     }
 
-    /**
-     * Configures the value to be persisted to the extensions object
-     * @param extracted the value extracted by the regex
-     * @return an object representing the appropriate extension
-     */
-    protected abstract T prepareObject(String extracted);
-
-    @SuppressWarnings("unchecked")
-    protected Collection<T> ensureTargetObject(Activity activity) {
-        Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
-        Set<T> hashtags;
-        if(extensions.containsKey(extensionKey) && extensions.get(extensionKey) != null) {
-            hashtags = Sets.newHashSet((Iterable<T>) extensions.get(extensionKey));
-        } else {
-            hashtags = new HashSet<>();
-        }
-
-        extensions.put(extensionKey, hashtags);
-
-        return hashtags;
+    Set<T> set = new HashSet<>();
+    set.addAll(entities);
+    entities.clear();
+    entities.addAll(set);
+
+    entry.setDocument(activity);
+    return Lists.newArrayList(entry);
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    if (configurationObject instanceof Map) {
+      if (((Map) configurationObject).containsKey(patternConfigKey)) {
+        pattern = (String) ((Map) configurationObject).get(patternConfigKey);
+      }
+    } else if (configurationObject instanceof String) {
+      pattern = (String) configurationObject;
+    } else {
+      pattern = defaultPattern;
+    }
+  }
+
+  @Override
+  public void cleanUp() {
+    //NOP
+  }
+
+  /**
+   * Configures the value to be persisted to the extensions object.
+   * @param extracted the value extracted by the regex
+   * @return an object representing the appropriate extension
+   */
+  protected abstract T prepareObject(String extracted);
+
+  @SuppressWarnings("unchecked")
+  protected Collection<T> ensureTargetObject(Activity activity) {
+    Map<String, Object> extensions = ExtensionUtil.getInstance().ensureExtensions(activity);
+    Set<T> hashtags;
+
+    if (extensions.containsKey(extensionKey) && extensions.get(extensionKey) != null) {
+      hashtags = Sets.newHashSet((Iterable<T>) extensions.get(extensionKey));
+    } else {
+      hashtags = new HashSet<>();
     }
+
+    extensions.put(extensionKey, hashtags);
+
+    return hashtags;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexHashtagExtractor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexHashtagExtractor.java b/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexHashtagExtractor.java
index a59193b..326d5fa 100644
--- a/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexHashtagExtractor.java
+++ b/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexHashtagExtractor.java
@@ -20,6 +20,7 @@
 package org.apache.streams.regex;
 
 import org.apache.streams.core.StreamsProcessor;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -27,27 +28,27 @@ import org.slf4j.LoggerFactory;
  * Processes the content of an {@link org.apache.streams.pojo.json.Activity} object to extract the Hashtags and add
  * them to the appropriate extensions object
  */
-public class RegexHashtagExtractor extends AbstractRegexExtensionExtractor<String> implements StreamsProcessor{
+public class RegexHashtagExtractor extends AbstractRegexExtensionExtractor<String> implements StreamsProcessor {
 
-    private final static String STREAMS_ID = "RegexHashtagExtractor";
+  private static final String STREAMS_ID = "RegexHashtagExtractor";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(RegexHashtagExtractor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(RegexHashtagExtractor.class);
 
-    public final static String DEFAULT_PATTERN = "#\\w+";
-    public final static String PATTERN_CONFIG_KEY = "HashtagPattern";
-    public final static String EXTENSION_KEY = "hashtags";
+  public static final String DEFAULT_PATTERN = "#\\w+";
+  public static final String PATTERN_CONFIG_KEY = "HashtagPattern";
+  public static final String EXTENSION_KEY = "hashtags";
 
-    public RegexHashtagExtractor() {
-        super(PATTERN_CONFIG_KEY, EXTENSION_KEY, DEFAULT_PATTERN);
-    }
+  public RegexHashtagExtractor() {
+    super(PATTERN_CONFIG_KEY, EXTENSION_KEY, DEFAULT_PATTERN);
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    protected String prepareObject(String extracted) {
-        return extracted.substring(1);
-    }
+  @Override
+  protected String prepareObject(String extracted) {
+    return extracted.substring(1);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexMentionsExtractor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexMentionsExtractor.java b/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexMentionsExtractor.java
index eabdb04..ddd3ed5 100644
--- a/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexMentionsExtractor.java
+++ b/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexMentionsExtractor.java
@@ -18,9 +18,11 @@
  */
 
 package org.apache.streams.regex;
-import com.google.common.collect.Maps;
+
 import org.apache.streams.core.StreamsProcessor;
 
+import com.google.common.collect.Maps;
+
 import java.util.HashMap;
 import java.util.Map;
 
@@ -30,27 +32,27 @@ import java.util.Map;
  */
 public class RegexMentionsExtractor extends AbstractRegexExtensionExtractor<Map<String, Object>> implements StreamsProcessor {
 
-    private final static String STREAMS_ID = "RegexMentionsExtractor";
+  private static final String STREAMS_ID = "RegexMentionsExtractor";
 
-    public static final String DEFAULT_PATTERN = "@\\w+";
-    public static final String PATTERN_CONFIG_KEY = "MentionPattern";
-    public static final String EXTENSION_KEY = "user_mentions";
-    public static final String DISPLAY_KEY = "displayName";
+  public static final String DEFAULT_PATTERN = "@\\w+";
+  public static final String PATTERN_CONFIG_KEY = "MentionPattern";
+  public static final String EXTENSION_KEY = "user_mentions";
+  public static final String DISPLAY_KEY = "displayName";
 
-    public RegexMentionsExtractor() {
-        super(PATTERN_CONFIG_KEY, EXTENSION_KEY, DEFAULT_PATTERN);
-    }
+  public RegexMentionsExtractor() {
+    super(PATTERN_CONFIG_KEY, EXTENSION_KEY, DEFAULT_PATTERN);
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    protected Map<String, Object> prepareObject(String extracted) {
-        HashMap<String, Object> mention = Maps.newHashMap();
-        mention.put(DISPLAY_KEY, extracted.substring(1));
-        return mention;
-    }
+  @Override
+  protected Map<String, Object> prepareObject(String extracted) {
+    HashMap<String, Object> mention = Maps.newHashMap();
+    mention.put(DISPLAY_KEY, extracted.substring(1));
+    return mention;
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexUrlExtractor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexUrlExtractor.java b/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexUrlExtractor.java
index 84d3257..ea8474d 100644
--- a/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexUrlExtractor.java
+++ b/streams-contrib/streams-processor-regex/src/main/java/org/apache/streams/regex/RegexUrlExtractor.java
@@ -30,46 +30,46 @@ import java.util.Collection;
  */
 public class RegexUrlExtractor extends AbstractRegexExtensionExtractor<String> implements StreamsProcessor {
 
-    private final static String STREAMS_ID = "RegexUrlExtractor";
+  private static final String STREAMS_ID = "RegexUrlExtractor";
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    //Temporarily copied from streams-processor-urls so as not to force a dependency on that provider.  This should
-    //be moved to a common utility package
-    public final static String DEFAULT_PATTERN =
-            "(?:(?:https?|ftp)://)" +
-                    "(?:\\S+(?::\\S*)?@)?" +
-                    "(?:" +
-                    "(?!(?:10|127)(?:\\.\\d{1,3}){3})" +
-                    "(?!(?:169\\.254|192\\.168)(?:\\.\\d{1,3}){2})" +
-                    "(?!172\\.(?:1[6-9]|2\\d|3[0-1])(?:\\.\\d{1,3}){2})" +
-                    "(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])" +
-                    "(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}" +
-                    "(?:\\.(?:[1-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))" +
-                    "|" +
-                    "(?:(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)" +
-                    "(?:\\.(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)*" +
-                    "(?:\\.(?:[a-z\\u00a1-\\uffff]{2,}))" +
-                    ")" +
-                    "(?::\\d{2,5})?" +
-                    "(?:/[^\\s]*)?";
+  //Temporarily copied from streams-processor-urls so as not to force a dependency on that provider.  This should
+  //be moved to a common utility package
+  public static final String DEFAULT_PATTERN =
+      "(?:(?:https?|ftp)://)"
+          + "(?:\\S+(?::\\S*)?@)?"
+          + "(?:"
+          + "(?!(?:10|127)(?:\\.\\d{1,3}){3})"
+          + "(?!(?:169\\.254|192\\.168)(?:\\.\\d{1,3}){2})"
+          + "(?!172\\.(?:1[6-9]|2\\d|3[0-1])(?:\\.\\d{1,3}){2})"
+          + "(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])"
+          + "(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}"
+          + "(?:\\.(?:[1-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))"
+          + "|"
+          + "(?:(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)"
+          + "(?:\\.(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)*"
+          + "(?:\\.(?:[a-z\\u00a1-\\uffff]{2,}))"
+          + ")"
+          + "(?::\\d{2,5})?"
+          + "(?:/[^\\s]*)?";
 
-    public final static String PATTERN_CONFIG_KEY = "URLPattern";
+  public static final String PATTERN_CONFIG_KEY = "URLPattern";
 
-    public RegexUrlExtractor() {
-        super(PATTERN_CONFIG_KEY, null, DEFAULT_PATTERN);
-    }
+  public RegexUrlExtractor() {
+    super(PATTERN_CONFIG_KEY, null, DEFAULT_PATTERN);
+  }
 
-    @Override
-    protected String prepareObject(String extracted) {
-        return extracted;
-    }
+  @Override
+  protected String prepareObject(String extracted) {
+    return extracted;
+  }
 
-    @Override
-    protected Collection<String> ensureTargetObject(Activity activity) {
-        return activity.getLinks();
-    }
+  @Override
+  protected Collection<String> ensureTargetObject(Activity activity) {
+    return activity.getLinks();
+  }
 }


[13/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchResourceGeneratorMojo.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchResourceGeneratorMojo.java b/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchResourceGeneratorMojo.java
index 1485023..ea512b3 100644
--- a/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchResourceGeneratorMojo.java
+++ b/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchResourceGeneratorMojo.java
@@ -16,6 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.plugins.elasticsearch;
 
 import org.apache.maven.plugin.AbstractMojo;
@@ -33,58 +34,59 @@ import org.slf4j.LoggerFactory;
 import java.io.File;
 import java.util.List;
 
-@Mojo(  name = "generate-resources",
-        defaultPhase = LifecyclePhase.GENERATE_RESOURCES
-)
-@Execute(   goal = "generate-resources",
-            phase = LifecyclePhase.GENERATE_RESOURCES
-)
+@Mojo (  name = "generate-resources",
+    defaultPhase = LifecyclePhase.GENERATE_RESOURCES
+    )
+@Execute (   goal = "generate-resources",
+    phase = LifecyclePhase.GENERATE_RESOURCES
+    )
 public class StreamsElasticsearchResourceGeneratorMojo extends AbstractMojo {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsElasticsearchResourceGeneratorMojo.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsElasticsearchResourceGeneratorMojo.class);
 
-    private volatile MojoFailureException mojoFailureException;
+  private volatile MojoFailureException mojoFailureException;
 
-    @Component
-    private MavenProject project;
+  @Component
+  private MavenProject project;
 
-//    @Component
-//    private Settings settings;
-//
-//    @Parameter( defaultValue = "${localRepository}", readonly = true, required = true )
-//    protected ArtifactRepository localRepository;
-//
-//    @Parameter( defaultValue = "${plugin}", readonly = true ) // Maven 3 only
-//    private PluginDescriptor plugin;
-//
-    @Parameter( defaultValue = "${project.basedir}", readonly = true )
-    private File basedir;
+  //    @Component
+  //    private Settings settings;
+  //
+  //    @Parameter( defaultValue = "${localRepository}", readonly = true, required = true )
+  //    protected ArtifactRepository localRepository;
+  //
+  //    @Parameter( defaultValue = "${plugin}", readonly = true ) // Maven 3 only
+  //    private PluginDescriptor plugin;
+  //
+  @Parameter( defaultValue = "${project.basedir}", readonly = true )
+  private File basedir;
 
-    @Parameter( defaultValue = "src/main/jsonschema", readonly = true ) // Maven 3 only
-    public String sourceDirectory;
+  @Parameter( defaultValue = "src/main/jsonschema", readonly = true ) // Maven 3 only
+  public String sourceDirectory;
 
-    @Parameter( readonly = true ) // Maven 3 only
-    public List<String> sourcePaths;
+  @Parameter( readonly = true ) // Maven 3 only
+  public List<String> sourcePaths;
 
-    @Parameter(defaultValue = "./target/generated-resources/streams-plugin-elasticsearch", readonly = true)
-    public String targetDirectory;
+  @Parameter(defaultValue = "./target/generated-resources/streams-plugin-elasticsearch", readonly = true)
+  public String targetDirectory;
 
-    public void execute() throws MojoExecutionException, MojoFailureException {
+  public void execute() throws MojoExecutionException, MojoFailureException {
 
-        //addProjectDependenciesToClasspath();
+    //addProjectDependenciesToClasspath();
 
-        StreamsElasticsearchGenerationConfig config = new StreamsElasticsearchGenerationConfig();
+    StreamsElasticsearchGenerationConfig config = new StreamsElasticsearchGenerationConfig();
 
-        if( sourcePaths != null && sourcePaths.size() > 0)
-            config.setSourcePaths(sourcePaths);
-        else
-            config.setSourceDirectory(sourceDirectory);
-        config.setTargetDirectory(targetDirectory);
+    if ( sourcePaths != null && sourcePaths.size() > 0) {
+      config.setSourcePaths(sourcePaths);
+    } else {
+      config.setSourceDirectory(sourceDirectory);
+    }
+    config.setTargetDirectory(targetDirectory);
 
-        StreamsElasticsearchResourceGenerator streamsElasticsearchResourceGenerator = new StreamsElasticsearchResourceGenerator(config);
+    StreamsElasticsearchResourceGenerator streamsElasticsearchResourceGenerator = new StreamsElasticsearchResourceGenerator(config);
 
-        streamsElasticsearchResourceGenerator.run();
+    streamsElasticsearchResourceGenerator.run();
 
-    }
+  }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorCLITest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorCLITest.java b/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorCLITest.java
index 809b9a0..887461c 100644
--- a/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorCLITest.java
+++ b/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorCLITest.java
@@ -16,11 +16,13 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.plugins.elasticsearch.test;
 
+import org.apache.streams.plugins.elasticsearch.StreamsElasticsearchResourceGenerator;
+
 import com.google.common.collect.Lists;
 import com.google.common.io.Files;
-import org.apache.streams.plugins.elasticsearch.StreamsElasticsearchResourceGenerator;
 import org.junit.Test;
 
 import java.io.File;
@@ -30,28 +32,28 @@ import java.util.List;
 import static org.apache.streams.plugins.elasticsearch.test.StreamsElasticsearchResourceGeneratorTest.jsonFilter;
 
 /**
- * Created by sblackmon on 5/5/16.
+ * Test that StreamsElasticsearchResourceGeneratorCLI generates resources.
  */
 public class StreamsElasticsearchResourceGeneratorCLITest {
 
-    @Test
-    public void testStreamsElasticsearchResourceGeneratorCLI() throws Exception {
+  @Test
+  public void testStreamsElasticsearchResourceGeneratorCLI() throws Exception {
 
-        String sourceDirectory = "target/test-classes/activitystreams-schemas";
-        String targetDirectory = "target/generated-resources/elasticsearch-cli";
+    String sourceDirectory = "target/test-classes/activitystreams-schemas";
+    String targetDirectory = "target/generated-resources/elasticsearch-cli";
 
-        List<String> argsList = Lists.newArrayList(sourceDirectory, targetDirectory);
-        StreamsElasticsearchResourceGenerator.main(argsList.toArray(new String[0]));
+    List<String> argsList = Lists.newArrayList(sourceDirectory, targetDirectory);
+    StreamsElasticsearchResourceGenerator.main(argsList.toArray(new String[0]));
 
-        File testOutput = new File(targetDirectory);
+    File testOutput = new File(targetDirectory);
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(jsonFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 133 );
-    }
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(jsonFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 133 );
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorMojoIT.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorMojoIT.java b/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorMojoIT.java
index 2a24846..a0fbb99 100644
--- a/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorMojoIT.java
+++ b/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorMojoIT.java
@@ -16,6 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.plugins.elasticsearch.test;
 
 import com.google.common.collect.Lists;
@@ -31,38 +32,37 @@ import java.util.ArrayList;
 import java.util.List;
 
 /**
- * Tests that streams-plugin-hive running via maven generates hql resources
+ * Tests that streams-plugin-elasticsearch running via maven generates elasticsearch mapping resources.
  */
 public class StreamsElasticsearchResourceGeneratorMojoIT extends TestCase {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsElasticsearchResourceGeneratorMojoIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsElasticsearchResourceGeneratorMojoIT.class);
 
-    protected void setUp() throws Exception
-    {
-        // required for mojo lookups to work
-        super.setUp();
-    }
+  protected void setUp() throws Exception {
+    // required for mojo lookups to work
+    super.setUp();
+  }
 
 
-    @Test
-    public void testStreamsElasticsearchResourceGeneratorMojo() throws Exception {
+  @Test
+  public void testStreamsElasticsearchResourceGeneratorMojo() throws Exception {
 
-        File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-elasticsearch" );
+    File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-elasticsearch" );
 
-        Verifier verifier;
+    Verifier verifier;
 
-        verifier = new Verifier( testDir.getAbsolutePath() );
+    verifier = new Verifier( testDir.getAbsolutePath() );
 
-        List cliOptions = new ArrayList();
-        cliOptions.add( "-N" );
-        verifier.executeGoals( Lists.<String>newArrayList(
-                "clean",
-                "dependency:unpack-dependencies",
-                "generate-resources"));
+    List cliOptions = new ArrayList();
+    cliOptions.add( "-N" );
+    verifier.executeGoals( Lists.<String>newArrayList(
+        "clean",
+        "dependency:unpack-dependencies",
+        "generate-resources"));
 
-        verifier.verifyErrorFreeLog();
+    verifier.verifyErrorFreeLog();
 
-        verifier.resetStreams();
+    verifier.resetStreams();
 
-    }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorTest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorTest.java b/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorTest.java
index 9c1eed4..4322b11 100644
--- a/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorTest.java
+++ b/streams-plugins/streams-plugin-elasticsearch/src/test/java/org/apache/streams/plugins/elasticsearch/test/StreamsElasticsearchResourceGeneratorTest.java
@@ -16,24 +16,26 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.plugins.elasticsearch.test;
 
+import org.apache.streams.plugins.elasticsearch.StreamsElasticsearchGenerationConfig;
+import org.apache.streams.plugins.elasticsearch.StreamsElasticsearchResourceGenerator;
+
 import com.google.common.base.Predicate;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
-import org.apache.streams.plugins.elasticsearch.StreamsElasticsearchGenerationConfig;
-import org.apache.streams.plugins.elasticsearch.StreamsElasticsearchResourceGenerator;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nullable;
 import java.io.File;
 import java.util.Collection;
 import java.util.Iterator;
+import javax.annotation.Nullable;
 
 import static org.apache.streams.util.schema.FileUtil.dropSourcePathPrefix;
 
@@ -42,87 +44,91 @@ import static org.apache.streams.util.schema.FileUtil.dropSourcePathPrefix;
  */
 public class StreamsElasticsearchResourceGeneratorTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsElasticsearchResourceGeneratorTest.class);
-
-    public static final Predicate<File> jsonFilter = new Predicate<File>() {
-        @Override
-        public boolean apply(@Nullable File file) {
-            if( file.getName().endsWith(".json") )
-                return true;
-            else return false;
-        }
-    };
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsElasticsearchResourceGeneratorTest.class);
 
-    /**
-     * Test that Elasticsearch resources are generated
-     *
-     * @throws Exception
-     */
-    @Test
-    public void StreamsElasticsearchResourceGenerator() throws Exception {
-
-        StreamsElasticsearchGenerationConfig config = new StreamsElasticsearchGenerationConfig();
-
-        String sourceDirectory = "target/test-classes/activitystreams-schemas";
-
-        config.setSourceDirectory(sourceDirectory);
-
-        config.setTargetDirectory("target/generated-resources/elasticsearch");
-
-        config.setExclusions(Sets.newHashSet("attachments"));
+  public static final Predicate<File> jsonFilter = new Predicate<File>() {
+    @Override
+    public boolean apply(@Nullable File file) {
+      if ( file.getName().endsWith(".json") ) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+  };
 
-        config.setMaxDepth(2);
+  /**
+   * Test that Elasticsearch resources are generated.
+   *
+   * @throws Exception Exception
+   */
+  @Test
+  public void StreamsElasticsearchResourceGenerator() throws Exception {
 
-        StreamsElasticsearchResourceGenerator streamsElasticsearchResourceGenerator = new StreamsElasticsearchResourceGenerator(config);
-        streamsElasticsearchResourceGenerator.run();
+    StreamsElasticsearchGenerationConfig config = new StreamsElasticsearchGenerationConfig();
 
-        File testOutput = config.getTargetDirectory();
+    String sourceDirectory = "target/test-classes/activitystreams-schemas";
 
-        Predicate<File> jsonFilter = new Predicate<File>() {
-            @Override
-            public boolean apply(@Nullable File file) {
-                if( file.getName().endsWith(".json") )
-                    return true;
-                else return false;
-            }
-        };
+    config.setSourceDirectory(sourceDirectory);
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    config.setTargetDirectory("target/generated-resources/elasticsearch");
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(jsonFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 133 );
+    config.setExclusions(Sets.newHashSet("attachments"));
 
-        String expectedDirectory = "target/test-classes/expected";
-        File testExpected = new File( expectedDirectory );
+    config.setMaxDepth(2);
 
-        Iterable<File> expectedIterator = Files.fileTreeTraverser().breadthFirstTraversal(testExpected)
-                .filter(jsonFilter);
-        Collection<File> expectedCollection = Lists.newArrayList(expectedIterator);
+    StreamsElasticsearchResourceGenerator streamsElasticsearchResourceGenerator = new StreamsElasticsearchResourceGenerator(config);
+    streamsElasticsearchResourceGenerator.run();
 
-        int fails = 0;
+    File testOutput = config.getTargetDirectory();
 
-        Iterator<File> iterator = expectedCollection.iterator();
-        while( iterator.hasNext() ) {
-            File objectExpected = iterator.next();
-            String expectedEnd = dropSourcePathPrefix(objectExpected.getAbsolutePath(),  expectedDirectory);
-            File objectActual = new File(config.getTargetDirectory() + "/" + expectedEnd);
-            LOGGER.info("Comparing: {} and {}", objectExpected.getAbsolutePath(), objectActual.getAbsolutePath());
-            assert( objectActual.exists());
-            if( FileUtils.contentEquals(objectActual, objectExpected) == true ) {
-                LOGGER.info("Exact Match!");
-            } else {
-                LOGGER.info("No Match!");
-                fails++;
-            }
-        }
-        if( fails > 0 ) {
-            LOGGER.info("Fails: {}", fails);
-            Assert.fail();
+    Predicate<File> jsonFilter = new Predicate<File>() {
+      @Override
+      public boolean apply(@Nullable File file) {
+        if ( file.getName().endsWith(".json") ) {
+          return true;
+        } else {
+          return false;
         }
+      }
+    };
 
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
+
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(jsonFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 133 );
+
+    String expectedDirectory = "target/test-classes/expected";
+    File testExpected = new File( expectedDirectory );
+
+    Iterable<File> expectedIterator = Files.fileTreeTraverser().breadthFirstTraversal(testExpected)
+        .filter(jsonFilter);
+    Collection<File> expectedCollection = Lists.newArrayList(expectedIterator);
+
+    int fails = 0;
+
+    Iterator<File> iterator = expectedCollection.iterator();
+    while ( iterator.hasNext() ) {
+      File objectExpected = iterator.next();
+      String expectedEnd = dropSourcePathPrefix(objectExpected.getAbsolutePath(),  expectedDirectory);
+      File objectActual = new File(config.getTargetDirectory() + "/" + expectedEnd);
+      LOGGER.info("Comparing: {} and {}", objectExpected.getAbsolutePath(), objectActual.getAbsolutePath());
+      assert ( objectActual.exists());
+      if ( FileUtils.contentEquals(objectActual, objectExpected) == true ) {
+        LOGGER.info("Exact Match!");
+      } else {
+        LOGGER.info("No Match!");
+        fails++;
+      }
     }
+    if ( fails > 0 ) {
+      LOGGER.info("Fails: {}", fails);
+      Assert.fail();
+    }
+
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseGenerationConfig.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseGenerationConfig.java b/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseGenerationConfig.java
index 977c3d9..d939372 100644
--- a/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseGenerationConfig.java
+++ b/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseGenerationConfig.java
@@ -20,6 +20,7 @@
 package org.apache.streams.plugins.hbase;
 
 import org.apache.streams.util.schema.GenerationConfig;
+
 import org.jsonschema2pojo.DefaultGenerationConfig;
 import org.jsonschema2pojo.util.URLUtil;
 
@@ -33,77 +34,80 @@ import java.util.List;
 import java.util.Set;
 
 /**
- * Configures StreamsHiveResourceGenerator
- *
- *
+ * Configures StreamsHiveResourceGenerator.
  */
 public class StreamsHbaseGenerationConfig extends DefaultGenerationConfig implements GenerationConfig {
 
-    public String getSourceDirectory() {
-        return sourceDirectory;
+  public String getSourceDirectory() {
+    return sourceDirectory;
+  }
+
+  public List<String> getSourcePaths() {
+    return sourcePaths;
+  }
+
+  private String columnFamily;
+  private String sourceDirectory;
+  private List<String> sourcePaths = new ArrayList<String>();
+  private String targetDirectory;
+  private int maxDepth = 1;
+
+  public Set<String> getExclusions() {
+    return exclusions;
+  }
+
+  public void setExclusions(Set<String> exclusions) {
+    this.exclusions = exclusions;
+  }
+
+  private Set<String> exclusions = new HashSet<String>();
+
+  public int getMaxDepth() {
+    return maxDepth;
+  }
+
+  public void setSourceDirectory(String sourceDirectory) {
+    this.sourceDirectory = sourceDirectory;
+  }
+
+  public void setSourcePaths(List<String> sourcePaths) {
+    this.sourcePaths = sourcePaths;
+  }
+
+  public void setTargetDirectory(String targetDirectory) {
+    this.targetDirectory = targetDirectory;
+  }
+
+  public File getTargetDirectory() {
+    return new File(targetDirectory);
+  }
+
+  /**
+   * get all sources.
+   * @return Iterator of URL
+   */
+  public Iterator<URL> getSource() {
+    if (null != sourceDirectory) {
+      return Collections.singleton(URLUtil.parseURL(sourceDirectory)).iterator();
     }
-
-    public List<String> getSourcePaths() {
-        return sourcePaths;
+    List<URL> sourceUrls = new ArrayList<URL>();
+    if ( sourcePaths != null && sourcePaths.size() > 0) {
+      for (String source : sourcePaths) {
+        sourceUrls.add(URLUtil.parseURL(source));
+      }
     }
+    return sourceUrls.iterator();
+  }
 
-    private String columnFamily;
-    private String sourceDirectory;
-    private List<String> sourcePaths = new ArrayList<String>();
-    private String targetDirectory;
-    private int maxDepth = 1;
+  public void setMaxDepth(int maxDepth) {
+    this.maxDepth = maxDepth;
+  }
 
-    public Set<String> getExclusions() {
-        return exclusions;
-    }
-
-    public void setExclusions(Set<String> exclusions) {
-        this.exclusions = exclusions;
-    }
+  public String getColumnFamily() {
+    return columnFamily;
+  }
 
-    private Set<String> exclusions = new HashSet<String>();
-
-    public int getMaxDepth() {
-        return maxDepth;
-    }
-
-    public void setSourceDirectory(String sourceDirectory) {
-        this.sourceDirectory = sourceDirectory;
-    }
-
-    public void setSourcePaths(List<String> sourcePaths) {
-        this.sourcePaths = sourcePaths;
-    }
-
-    public void setTargetDirectory(String targetDirectory) {
-        this.targetDirectory = targetDirectory;
-    }
-
-    public File getTargetDirectory() {
-        return new File(targetDirectory);
-    }
-
-    public Iterator<URL> getSource() {
-        if (null != sourceDirectory) {
-            return Collections.singleton(URLUtil.parseURL(sourceDirectory)).iterator();
-        }
-        List<URL> sourceURLs = new ArrayList<URL>();
-        if( sourcePaths != null && sourcePaths.size() > 0)
-            for (String source : sourcePaths) {
-                sourceURLs.add(URLUtil.parseURL(source));
-            }
-        return sourceURLs.iterator();
-    }
-
-    public void setMaxDepth(int maxDepth) {
-        this.maxDepth = maxDepth;
-    }
-
-    public String getColumnFamily() {
-        return columnFamily;
-    }
-
-    public void setColumnFamily(String columnFamily) {
-        this.columnFamily = columnFamily;
-    }
+  public void setColumnFamily(String columnFamily) {
+    this.columnFamily = columnFamily;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseResourceGenerator.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseResourceGenerator.java b/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseResourceGenerator.java
index 9f96fb8..cbea67f 100644
--- a/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseResourceGenerator.java
+++ b/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseResourceGenerator.java
@@ -19,16 +19,17 @@
 
 package org.apache.streams.plugins.hbase;
 
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Joiner;
-import com.google.common.collect.Lists;
 import org.apache.streams.util.schema.FieldType;
 import org.apache.streams.util.schema.FieldUtil;
 import org.apache.streams.util.schema.GenerationConfig;
 import org.apache.streams.util.schema.Schema;
 import org.apache.streams.util.schema.SchemaStore;
 import org.apache.streams.util.schema.SchemaStoreImpl;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
 import org.jsonschema2pojo.util.URLUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -52,161 +53,188 @@ import static org.apache.streams.util.schema.FileUtil.writeFile;
  */
 public class StreamsHbaseResourceGenerator implements Runnable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsHbaseResourceGenerator.class);
-
-    private final static String LS = System.getProperty("line.separator");
-
-    private StreamsHbaseGenerationConfig config;
-
-    private SchemaStore schemaStore = new SchemaStoreImpl();
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsHbaseResourceGenerator.class);
 
-    private int currentDepth = 0;
+  private static final String LS = System.getProperty("line.separator");
 
-    public static void main(String[] args) {
-        StreamsHbaseGenerationConfig config = new StreamsHbaseGenerationConfig();
+  private StreamsHbaseGenerationConfig config;
 
-        String sourceDirectory = "src/main/jsonschema";
-        String targetDirectory = "target/generated-resources/hbase";
+  private SchemaStore schemaStore = new SchemaStoreImpl();
 
-        if( args.length > 0 )
-            sourceDirectory = args[0];
-        if( args.length > 1 )
-            targetDirectory = args[1];
+  private int currentDepth = 0;
 
-        config.setSourceDirectory(sourceDirectory);
-        config.setTargetDirectory(targetDirectory);
+  /**
+   * Run from CLI without Maven
+   *
+   * <p/>
+   * java -jar streams-plugin-hbase-jar-with-dependencies.jar StreamsHbaseResourceGenerator src/main/jsonschema target/generated-resources
+   *
+   * @param args [sourceDirectory, targetDirectory]
+   * */
+  public static void main(String[] args) {
+    StreamsHbaseGenerationConfig config = new StreamsHbaseGenerationConfig();
 
-        StreamsHbaseResourceGenerator streamsHbaseResourceGenerator = new StreamsHbaseResourceGenerator(config);
-        streamsHbaseResourceGenerator.run();
+    String sourceDirectory = "src/main/jsonschema";
+    String targetDirectory = "target/generated-resources/hbase";
 
+    if ( args.length > 0 ) {
+      sourceDirectory = args[0];
     }
-
-    public StreamsHbaseResourceGenerator(StreamsHbaseGenerationConfig config) {
-        this.config = config;
+    if ( args.length > 1 ) {
+      targetDirectory = args[1];
     }
 
-    public void run() {
-
-        checkNotNull(config);
+    config.setSourceDirectory(sourceDirectory);
+    config.setTargetDirectory(targetDirectory);
 
-        generate(config);
+    StreamsHbaseResourceGenerator streamsHbaseResourceGenerator = new StreamsHbaseResourceGenerator(config);
+    streamsHbaseResourceGenerator.run();
 
-    }
+  }
 
-    public void generate(StreamsHbaseGenerationConfig config) {
+  public StreamsHbaseResourceGenerator(StreamsHbaseGenerationConfig config) {
+    this.config = config;
+  }
 
-        LinkedList<File> sourceFiles = new LinkedList<File>();
+  @Override
+  public void run() {
 
-        for (Iterator<URL> sources = config.getSource(); sources.hasNext();) {
-            URL source = sources.next();
-            sourceFiles.add(URLUtil.getFileFromURL(source));
-        }
+    checkNotNull(config);
 
-        LOGGER.info("Seeded with {} source paths:", sourceFiles.size());
+    generate(config);
 
-        resolveRecursive((GenerationConfig)config, sourceFiles);
+  }
 
-        LOGGER.info("Resolved {} schema files:", sourceFiles.size());
+  /**
+   * run generate using supplied StreamsHbaseGenerationConfig.
+   * @param config StreamsHbaseGenerationConfig
+   */
+  public void generate(StreamsHbaseGenerationConfig config) {
 
-        for (Iterator<File> iterator = sourceFiles.iterator(); iterator.hasNext();) {
-            File item = iterator.next();
-            schemaStore.create(item.toURI());
-        }
+    LinkedList<File> sourceFiles = new LinkedList<File>();
 
-        LOGGER.info("Identified {} objects:", schemaStore.getSize());
+    for (Iterator<URL> sources = config.getSource(); sources.hasNext();) {
+      URL source = sources.next();
+      sourceFiles.add(URLUtil.getFileFromURL(source));
+    }
 
-        for (Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator(); schemaIterator.hasNext(); ) {
-            Schema schema = schemaIterator.next();
-            currentDepth = 0;
-            if( schema.getURI().getScheme().equals("file")) {
-                String inputFile = schema.getURI().getPath();
-                String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
-                for (String sourcePath : config.getSourcePaths()) {
-                    resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
-                }
-                String outputFile = config.getTargetDirectory() + "/" + swapExtension(resourcePath, "json", "txt");
+    LOGGER.info("Seeded with {} source paths:", sourceFiles.size());
 
-                LOGGER.info("Processing {}:", resourcePath);
+    resolveRecursive((GenerationConfig)config, sourceFiles);
 
-                String resourceId = dropExtension(resourcePath).replace("/", "_");
+    LOGGER.info("Resolved {} schema files:", sourceFiles.size());
 
-                String resourceContent = generateResource(schema, resourceId);
+    for (Iterator<File> iterator = sourceFiles.iterator(); iterator.hasNext();) {
+      File item = iterator.next();
+      schemaStore.create(item.toURI());
+    }
 
-                writeFile(outputFile, resourceContent);
+    LOGGER.info("Identified {} objects:", schemaStore.getSize());
 
-                LOGGER.info("Wrote {}:", outputFile);
-            }
+    for (Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator(); schemaIterator.hasNext(); ) {
+      Schema schema = schemaIterator.next();
+      currentDepth = 0;
+      if ( schema.getUri().getScheme().equals("file")) {
+        String inputFile = schema.getUri().getPath();
+        String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
+        for (String sourcePath : config.getSourcePaths()) {
+          resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
         }
+        String outputFile = config.getTargetDirectory() + "/" + swapExtension(resourcePath, "json", "txt");
 
-    }
+        LOGGER.info("Processing {}:", resourcePath);
 
-    public String generateResource(Schema schema, String resourceId) {
-        StringBuilder resourceBuilder = new StringBuilder();
-        resourceBuilder.append("CREATE ");
-        resourceBuilder = appendRootObject(resourceBuilder, schema, resourceId);
-        return resourceBuilder.toString();
-    }
+        String resourceId = dropExtension(resourcePath).replace("/", "_");
 
-    public StringBuilder appendRootObject(StringBuilder builder, Schema schema, String resourceId) {
-        checkNotNull(builder);
-        ObjectNode propertiesNode = schemaStore.resolveProperties(schema, null, resourceId);
-        if( propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0) {
-
-            List<String> fieldStrings = Lists.newArrayList();
-
-            // table
-            fieldStrings.add(hbaseEscape(schemaSymbol(schema)));
-
-            // column family
-            fieldStrings.add(hbaseEscape(schemaSymbol(schema)));
-
-            // parent column family
-            if( schema.getParent() != null )
-                fieldStrings.add(hbaseEscape(schemaSymbol(schema.getParent())));
-
-            // sub-object column families
-            if( propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0 ) {
-
-                Iterator<Map.Entry<String, JsonNode>> fields = propertiesNode.fields();
-                Joiner joiner = Joiner.on(", ").skipNulls();
-                for( ; fields.hasNext(); ) {
-                    Map.Entry<String, JsonNode> field = fields.next();
-                    String fieldId = field.getKey();
-                    if( !config.getExclusions().contains(fieldId) && field.getValue().isObject()) {
-                        ObjectNode fieldNode = (ObjectNode) field.getValue();
-                        FieldType fieldType = FieldUtil.determineFieldType(fieldNode);
-                        if (fieldType != null ) {
-                            switch (fieldType) {
-                                case OBJECT:
-                                    fieldStrings.add(hbaseEscape(fieldId));
-                            }
-                        }
-                    }
-                }
-                builder.append(joiner.join(fieldStrings));
+        String resourceContent = generateResource(schema, resourceId);
 
-            }
-        }
-        checkNotNull(builder);
-        return builder;
-    }
+        writeFile(outputFile, resourceContent);
 
-    private static String hbaseEscape( String fieldId ) {
-        return "'"+fieldId+"'";
+        LOGGER.info("Wrote {}:", outputFile);
+      }
     }
 
-    private String schemaSymbol( Schema schema ) {
-        if (schema == null) return null;
-        if (schema.getURI().getScheme().equals("file")) {
-            String inputFile = schema.getURI().getPath();
-            String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
-            for (String sourcePath : config.getSourcePaths()) {
-                resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
+  }
+
+  /**
+   * generateResource String from schema and resourceId.
+   * @param schema Schema
+   * @param resourceId String
+   * @return mapping
+   */
+  public String generateResource(Schema schema, String resourceId) {
+    StringBuilder resourceBuilder = new StringBuilder();
+    resourceBuilder.append("CREATE ");
+    resourceBuilder = appendRootObject(resourceBuilder, schema, resourceId);
+    return resourceBuilder.toString();
+  }
+
+  protected StringBuilder appendRootObject(StringBuilder builder, Schema schema, String resourceId) {
+    checkNotNull(builder);
+    ObjectNode propertiesNode = schemaStore.resolveProperties(schema, null, resourceId);
+    if ( propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0) {
+
+      List<String> fieldStrings = Lists.newArrayList();
+
+      // table
+      fieldStrings.add(hbaseEscape(schemaSymbol(schema)));
+
+      // column family
+      fieldStrings.add(hbaseEscape(schemaSymbol(schema)));
+
+      // parent column family
+      if ( schema.getParent() != null ) {
+        fieldStrings.add(hbaseEscape(schemaSymbol(schema.getParent())));
+      }
+
+      // sub-object column families
+      if ( propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0 ) {
+
+        Iterator<Map.Entry<String, JsonNode>> fields = propertiesNode.fields();
+        Joiner joiner = Joiner.on(", ").skipNulls();
+        for ( ; fields.hasNext(); ) {
+          Map.Entry<String, JsonNode> field = fields.next();
+          String fieldId = field.getKey();
+          if ( !config.getExclusions().contains(fieldId) && field.getValue().isObject()) {
+            ObjectNode fieldNode = (ObjectNode) field.getValue();
+            FieldType fieldType = FieldUtil.determineFieldType(fieldNode);
+            if (fieldType != null ) {
+              switch (fieldType) {
+                case OBJECT:
+                  fieldStrings.add(hbaseEscape(fieldId));
+                  break;
+                default:
+                  break;
+              }
             }
-            return dropExtension(resourcePath).replace("/", "_");
-        } else {
-            return "IDK";
+          }
         }
+        builder.append(joiner.join(fieldStrings));
+
+      }
+    }
+    checkNotNull(builder);
+    return builder;
+  }
+
+  private static String hbaseEscape( String fieldId ) {
+    return "'" + fieldId + "'";
+  }
+
+  private String schemaSymbol( Schema schema ) {
+    if (schema == null) {
+      return null;
+    }
+    if (schema.getUri().getScheme().equals("file")) {
+      String inputFile = schema.getUri().getPath();
+      String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
+      for (String sourcePath : config.getSourcePaths()) {
+        resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
+      }
+      return dropExtension(resourcePath).replace("/", "_");
+    } else {
+      return "IDK";
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseResourceGeneratorMojo.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseResourceGeneratorMojo.java b/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseResourceGeneratorMojo.java
index 24a6b88..addc657 100644
--- a/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseResourceGeneratorMojo.java
+++ b/streams-plugins/streams-plugin-hbase/src/main/java/org/apache/streams/plugins/hbase/StreamsHbaseResourceGeneratorMojo.java
@@ -34,57 +34,65 @@ import org.slf4j.LoggerFactory;
 import java.io.File;
 import java.util.List;
 
-@Mojo(  name = "generate-resources",
-        defaultPhase = LifecyclePhase.GENERATE_RESOURCES
-)
-@Execute(   goal = "generate-resources",
-            phase = LifecyclePhase.GENERATE_RESOURCES
-)
+@Mojo (
+    name = "generate-resources",
+    defaultPhase = LifecyclePhase.GENERATE_RESOURCES
+    )
+@Execute (
+    goal = "generate-resources",
+    phase = LifecyclePhase.GENERATE_RESOURCES
+    )
 public class StreamsHbaseResourceGeneratorMojo extends AbstractMojo {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsHbaseResourceGeneratorMojo.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsHbaseResourceGeneratorMojo.class);
 
-    private volatile MojoFailureException mojoFailureException;
+  private volatile MojoFailureException mojoFailureException;
 
-    @Component
-    private MavenProject project;
+  @Component
+  private MavenProject project;
 
-//    @Component
-//    private Settings settings;
-//
-//    @Parameter( defaultValue = "${localRepository}", readonly = true, required = true )
-//    protected ArtifactRepository localRepository;
-//
-//    @Parameter( defaultValue = "${plugin}", readonly = true ) // Maven 3 only
-//    private PluginDescriptor plugin;
-//
-    @Parameter( defaultValue = "${project.basedir}", readonly = true )
-    private File basedir;
+  //    @Component
+  //    private Settings settings;
+  //
+  //    @Parameter( defaultValue = "${localRepository}", readonly = true, required = true )
+  //    protected ArtifactRepository localRepository;
+  //
+  //    @Parameter( defaultValue = "${plugin}", readonly = true ) // Maven 3 only
+  //    private PluginDescriptor plugin;
+  //
+  @Parameter( defaultValue = "${project.basedir}", readonly = true )
+  private File basedir;
 
-    @Parameter( defaultValue = "src/main/jsonschema", readonly = true ) // Maven 3 only
-    public String sourceDirectory;
+  @Parameter( defaultValue = "src/main/jsonschema", readonly = true ) // Maven 3 only
+  public String sourceDirectory;
 
-    @Parameter( readonly = true ) // Maven 3 only
-    public List<String> sourcePaths;
+  @Parameter( readonly = true ) // Maven 3 only
+  public List<String> sourcePaths;
 
-    @Parameter(defaultValue = "./target/generated-resources/hbase", readonly = true)
-    public String targetDirectory;
+  @Parameter(defaultValue = "./target/generated-resources/hbase", readonly = true)
+  public String targetDirectory;
 
-    public void execute() throws MojoExecutionException, MojoFailureException {
+  /**
+   * execute StreamsHbaseResourceGenerator mojo.
+   * @throws MojoExecutionException MojoExecutionException
+   * @throws MojoFailureException MojoFailureException
+   */
+  public void execute() throws MojoExecutionException, MojoFailureException {
 
-        //addProjectDependenciesToClasspath();
+    //addProjectDependenciesToClasspath();
 
-        StreamsHbaseGenerationConfig config = new StreamsHbaseGenerationConfig();
+    StreamsHbaseGenerationConfig config = new StreamsHbaseGenerationConfig();
 
-        if( sourcePaths != null && sourcePaths.size() > 0)
-            config.setSourcePaths(sourcePaths);
-        else
-            config.setSourceDirectory(sourceDirectory);
-        config.setTargetDirectory(targetDirectory);
+    if ( sourcePaths != null && sourcePaths.size() > 0) {
+      config.setSourcePaths(sourcePaths);
+    } else {
+      config.setSourceDirectory(sourceDirectory);
+    }
+    config.setTargetDirectory(targetDirectory);
 
-        StreamsHbaseResourceGenerator streamsHbaseResourceGenerator = new StreamsHbaseResourceGenerator(config);
-        streamsHbaseResourceGenerator.run();
+    StreamsHbaseResourceGenerator streamsHbaseResourceGenerator = new StreamsHbaseResourceGenerator(config);
+    streamsHbaseResourceGenerator.run();
 
-    }
+  }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorCLITest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorCLITest.java b/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorCLITest.java
index 3c223eb..254bd0e 100644
--- a/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorCLITest.java
+++ b/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorCLITest.java
@@ -19,13 +19,12 @@
 
 package org.apache.streams.plugins.test;
 
-import com.google.common.base.Predicate;
+import org.apache.streams.plugins.hbase.StreamsHbaseResourceGenerator;
+
 import com.google.common.collect.Lists;
 import com.google.common.io.Files;
-import org.apache.streams.plugins.hbase.StreamsHbaseResourceGenerator;
 import org.junit.Test;
 
-import javax.annotation.Nullable;
 import java.io.File;
 import java.util.Collection;
 import java.util.List;
@@ -33,28 +32,28 @@ import java.util.List;
 import static org.apache.streams.plugins.test.StreamsHbaseResourceGeneratorTest.txtFilter;
 
 /**
- * Created by sblackmon on 5/5/16.
+ * Test that StreamsHbaseResourceGeneratorCLI generates resources.
  */
 public class StreamsHbaseResourceGeneratorCLITest {
 
-    @Test
-    public void testStreamsHiveResourceGeneratorCLI() throws Exception {
+  @Test
+  public void testStreamsHbaseResourceGeneratorCLI() throws Exception {
 
-        String sourceDirectory = "target/test-classes/activitystreams-schemas";
-        String targetDirectory = "target/generated-resources/hbase-cli";
+    String sourceDirectory = "target/test-classes/activitystreams-schemas";
+    String targetDirectory = "target/generated-resources/hbase-cli";
 
-        List<String> argsList = Lists.newArrayList(sourceDirectory, targetDirectory);
-        StreamsHbaseResourceGenerator.main(argsList.toArray(new String[0]));
+    List<String> argsList = Lists.newArrayList(sourceDirectory, targetDirectory);
+    StreamsHbaseResourceGenerator.main(argsList.toArray(new String[0]));
 
-        File testOutput = new File(targetDirectory);
+    File testOutput = new File(targetDirectory);
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(txtFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 133 );
-    }
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(txtFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 133 );
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorMojoIT.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorMojoIT.java b/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorMojoIT.java
index 1495bc1..ff4e49a 100644
--- a/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorMojoIT.java
+++ b/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorMojoIT.java
@@ -36,48 +36,47 @@ import java.util.List;
 import static org.apache.streams.plugins.test.StreamsHbaseResourceGeneratorTest.txtFilter;
 
 /**
- * Tests that streams-plugin-hive running via maven generates hql resources
+ * Tests that streams-plugin-hbase running via maven generates txt resources.
  */
 public class StreamsHbaseResourceGeneratorMojoIT extends TestCase {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsHbaseResourceGeneratorMojoIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsHbaseResourceGeneratorMojoIT.class);
 
-    protected void setUp() throws Exception
-    {
-        // required for mojo lookups to work
-        super.setUp();
-    }
+  protected void setUp() throws Exception {
+    // required for mojo lookups to work
+    super.setUp();
+  }
 
 
-    @Test
-    public void testStreamsHbaseResourceGeneratorMojo() throws Exception {
+  @Test
+  public void testStreamsHbaseResourceGeneratorMojo() throws Exception {
 
-        File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-hbase" );
+    File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-hbase" );
 
-        Verifier verifier;
+    Verifier verifier;
 
-        verifier = new Verifier( testDir.getAbsolutePath() );
+    verifier = new Verifier( testDir.getAbsolutePath() );
 
-        List cliOptions = new ArrayList();
-        cliOptions.add( "-N" );
-        verifier.executeGoals( Lists.<String>newArrayList(
-                "clean",
-                "dependency:unpack-dependencies",
-                "generate-resources"));
+    List cliOptions = new ArrayList();
+    cliOptions.add( "-N" );
+    verifier.executeGoals( Lists.<String>newArrayList(
+        "clean",
+        "dependency:unpack-dependencies",
+        "generate-resources"));
 
-        verifier.verifyErrorFreeLog();
+    verifier.verifyErrorFreeLog();
 
-        verifier.resetStreams();
+    verifier.resetStreams();
 
-        File testOutput = new File(testDir.getAbsolutePath() + "/target/generated-resources/hbase-mojo");
+    File testOutput = new File(testDir.getAbsolutePath() + "/target/generated-resources/hbase-mojo");
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(txtFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 133 );
-    }
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(txtFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 133 );
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorTest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorTest.java b/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorTest.java
index 254578c..ac876d5 100644
--- a/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorTest.java
+++ b/streams-plugins/streams-plugin-hbase/src/test/java/org/apache/streams/plugins/test/StreamsHbaseResourceGeneratorTest.java
@@ -19,22 +19,23 @@
 
 package org.apache.streams.plugins.test;
 
+import org.apache.streams.plugins.hbase.StreamsHbaseGenerationConfig;
+import org.apache.streams.plugins.hbase.StreamsHbaseResourceGenerator;
+
 import com.google.common.base.Predicate;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
-import org.apache.streams.plugins.hbase.StreamsHbaseGenerationConfig;
-import org.apache.streams.plugins.hbase.StreamsHbaseResourceGenerator;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nullable;
 import java.io.File;
 import java.util.Collection;
 import java.util.Iterator;
+import javax.annotation.Nullable;
 
 import static org.apache.streams.util.schema.FileUtil.dropSourcePathPrefix;
 
@@ -43,79 +44,81 @@ import static org.apache.streams.util.schema.FileUtil.dropSourcePathPrefix;
  */
 public class StreamsHbaseResourceGeneratorTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsHbaseResourceGeneratorTest.class);
-
-    public static final Predicate<File> txtFilter = new Predicate<File>() {
-        @Override
-        public boolean apply(@Nullable File file) {
-            if( file.getName().endsWith(".txt") )
-                return true;
-            else return false;
-        }
-    };
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsHbaseResourceGeneratorTest.class);
 
-    /**
-     * Tests that all example activities can be loaded into Activity beans
-     *
-     * @throws Exception
-     */
-    @Test
-    public void StreamsHbaseResourceGenerator() throws Exception {
+  public static final Predicate<File> txtFilter = new Predicate<File>() {
+    @Override
+    public boolean apply(@Nullable File file) {
+      if ( file.getName().endsWith(".txt") ) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+  };
 
-        StreamsHbaseGenerationConfig config = new StreamsHbaseGenerationConfig();
+  /**
+   * Tests that all example activities can be loaded into Activity beans.
+   *
+   * @throws Exception Exception
+   */
+  @Test
+  public void testStreamsHbaseResourceGenerator() throws Exception {
 
-        String sourceDirectory = "target/test-classes/activitystreams-schemas";
+    StreamsHbaseGenerationConfig config = new StreamsHbaseGenerationConfig();
 
-        config.setSourceDirectory(sourceDirectory);
+    String sourceDirectory = "target/test-classes/activitystreams-schemas";
 
-        config.setTargetDirectory("target/generated-resources/hbase");
+    config.setSourceDirectory(sourceDirectory);
 
-        config.setExclusions(Sets.newHashSet("attachments"));
+    config.setTargetDirectory("target/generated-resources/hbase");
 
-        config.setColumnFamily("cf");
-        config.setMaxDepth(2);
+    config.setExclusions(Sets.newHashSet("attachments"));
 
-        StreamsHbaseResourceGenerator streamsHbaseResourceGenerator = new StreamsHbaseResourceGenerator(config);
-        streamsHbaseResourceGenerator.run();
+    config.setColumnFamily("cf");
+    config.setMaxDepth(2);
 
-        File testOutput = config.getTargetDirectory();
+    StreamsHbaseResourceGenerator streamsHbaseResourceGenerator = new StreamsHbaseResourceGenerator(config);
+    streamsHbaseResourceGenerator.run();
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    File testOutput = config.getTargetDirectory();
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(txtFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 133 );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        String expectedDirectory = "target/test-classes/expected";
-        File testExpected = new File( expectedDirectory );
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(txtFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 133 );
 
-        Iterable<File> expectedIterator = Files.fileTreeTraverser().breadthFirstTraversal(testExpected)
-                .filter(txtFilter);
-        Collection<File> expectedCollection = Lists.newArrayList(expectedIterator);
+    String expectedDirectory = "target/test-classes/expected";
+    File testExpected = new File( expectedDirectory );
 
-        int fails = 0;
+    Iterable<File> expectedIterator = Files.fileTreeTraverser().breadthFirstTraversal(testExpected)
+        .filter(txtFilter);
+    Collection<File> expectedCollection = Lists.newArrayList(expectedIterator);
 
-        Iterator<File> iterator = expectedCollection.iterator();
-        while( iterator.hasNext() ) {
-            File objectExpected = iterator.next();
-            String expectedEnd = dropSourcePathPrefix(objectExpected.getAbsolutePath(),  expectedDirectory);
-            File objectActual = new File(config.getTargetDirectory() + "/" + expectedEnd);
-            LOGGER.info("Comparing: {} and {}", objectExpected.getAbsolutePath(), objectActual.getAbsolutePath());
-            assert( objectActual.exists());
-            if( FileUtils.contentEquals(objectActual, objectExpected) == true ) {
-                LOGGER.info("Exact Match!");
-            } else {
-                LOGGER.info("No Match!");
-                fails++;
-            }
-        }
-        if( fails > 0 ) {
-            LOGGER.info("Fails: {}", fails);
-            Assert.fail();
-        }
+    int fails = 0;
 
+    Iterator<File> iterator = expectedCollection.iterator();
+    while ( iterator.hasNext() ) {
+      File objectExpected = iterator.next();
+      String expectedEnd = dropSourcePathPrefix(objectExpected.getAbsolutePath(),  expectedDirectory);
+      File objectActual = new File(config.getTargetDirectory() + "/" + expectedEnd);
+      LOGGER.info("Comparing: {} and {}", objectExpected.getAbsolutePath(), objectActual.getAbsolutePath());
+      assert ( objectActual.exists());
+      if ( FileUtils.contentEquals(objectActual, objectExpected) == true ) {
+        LOGGER.info("Exact Match!");
+      } else {
+        LOGGER.info("No Match!");
+        fails++;
+      }
     }
+    if ( fails > 0 ) {
+      LOGGER.info("Fails: {}", fails);
+      Assert.fail();
+    }
+
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveGenerationConfig.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveGenerationConfig.java b/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveGenerationConfig.java
index b09ce18..cc32255 100644
--- a/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveGenerationConfig.java
+++ b/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveGenerationConfig.java
@@ -20,6 +20,7 @@
 package org.apache.streams.plugins.hive;
 
 import org.apache.streams.util.schema.GenerationConfig;
+
 import org.jsonschema2pojo.DefaultGenerationConfig;
 import org.jsonschema2pojo.util.URLUtil;
 
@@ -33,70 +34,69 @@ import java.util.List;
 import java.util.Set;
 
 /**
- * Configures StreamsHiveResourceGenerator
- *
- *
+ * Configures StreamsHiveResourceGenerator.
  */
 public class StreamsHiveGenerationConfig extends DefaultGenerationConfig implements GenerationConfig {
 
-    public String getSourceDirectory() {
-        return sourceDirectory;
-    }
+  public String getSourceDirectory() {
+    return sourceDirectory;
+  }
 
-    public List<String> getSourcePaths() {
-        return sourcePaths;
-    }
+  public List<String> getSourcePaths() {
+    return sourcePaths;
+  }
 
-    private String sourceDirectory;
-    private List<String> sourcePaths = new ArrayList<String>();
-    private String targetDirectory;
-    private int maxDepth = 1;
+  private String sourceDirectory;
+  private List<String> sourcePaths = new ArrayList<String>();
+  private String targetDirectory;
+  private int maxDepth = 1;
 
-    public Set<String> getExclusions() {
-        return exclusions;
-    }
+  public Set<String> getExclusions() {
+    return exclusions;
+  }
 
-    public void setExclusions(Set<String> exclusions) {
-        this.exclusions = exclusions;
-    }
+  public void setExclusions(Set<String> exclusions) {
+    this.exclusions = exclusions;
+  }
 
-    private Set<String> exclusions = new HashSet<String>();
+  private Set<String> exclusions = new HashSet<String>();
 
-    public int getMaxDepth() {
-        return maxDepth;
-    }
+  public int getMaxDepth() {
+    return maxDepth;
+  }
 
-    public void setSourceDirectory(String sourceDirectory) {
-        this.sourceDirectory = sourceDirectory;
-    }
+  public void setSourceDirectory(String sourceDirectory) {
+    this.sourceDirectory = sourceDirectory;
+  }
 
-    public void setSourcePaths(List<String> sourcePaths) {
-        this.sourcePaths = sourcePaths;
-    }
+  public void setSourcePaths(List<String> sourcePaths) {
+    this.sourcePaths = sourcePaths;
+  }
 
-    public void setTargetDirectory(String targetDirectory) {
-        this.targetDirectory = targetDirectory;
-    }
+  public void setTargetDirectory(String targetDirectory) {
+    this.targetDirectory = targetDirectory;
+  }
 
-    @Override
-    public File getTargetDirectory() {
-        return new File(targetDirectory);
-    }
+  @Override
+  public File getTargetDirectory() {
+    return new File(targetDirectory);
+  }
 
-    @Override
-    public Iterator<URL> getSource() {
-        if (null != sourceDirectory) {
-            return Collections.singleton(URLUtil.parseURL(sourceDirectory)).iterator();
-        }
-        List<URL> sourceURLs = new ArrayList<URL>();
-        if( sourcePaths != null && sourcePaths.size() > 0)
-            for (String source : sourcePaths) {
-                sourceURLs.add(URLUtil.parseURL(source));
-            }
-        return sourceURLs.iterator();
+  @Override
+  public Iterator<URL> getSource() {
+    if (null != sourceDirectory) {
+      return Collections.singleton(URLUtil.parseURL(sourceDirectory)).iterator();
     }
-
-    public void setMaxDepth(int maxDepth) {
-        this.maxDepth = maxDepth;
+    List<URL> sourceUrls = new ArrayList<URL>();
+    if ( sourcePaths != null && sourcePaths.size() > 0) {
+      for (String source : sourcePaths) {
+        sourceUrls.add(URLUtil.parseURL(source));
+      }
     }
+    return sourceUrls.iterator();
+  }
+
+  public void setMaxDepth(int maxDepth) {
+    this.maxDepth = maxDepth;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveResourceGenerator.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveResourceGenerator.java b/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveResourceGenerator.java
index c270e14..e6cb54c 100644
--- a/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveResourceGenerator.java
+++ b/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveResourceGenerator.java
@@ -19,12 +19,6 @@
 
 package org.apache.streams.plugins.hive;
 
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
 import org.apache.streams.util.schema.FieldType;
 import org.apache.streams.util.schema.FieldUtil;
 import org.apache.streams.util.schema.FileUtil;
@@ -32,17 +26,29 @@ import org.apache.streams.util.schema.GenerationConfig;
 import org.apache.streams.util.schema.Schema;
 import org.apache.streams.util.schema.SchemaStore;
 import org.apache.streams.util.schema.SchemaStoreImpl;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Joiner;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
 import org.jsonschema2pojo.util.URLUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.net.URL;
-import java.util.*;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
 
 import static com.google.common.base.Preconditions.checkNotNull;
-import static org.apache.commons.lang3.StringUtils.defaultString;
-import static org.apache.streams.util.schema.FileUtil.*;
+import static org.apache.streams.util.schema.FileUtil.dropExtension;
+import static org.apache.streams.util.schema.FileUtil.dropSourcePathPrefix;
+import static org.apache.streams.util.schema.FileUtil.swapExtension;
+import static org.apache.streams.util.schema.FileUtil.writeFile;
 
 /**
  * Generates hive table definitions for using org.openx.data.jsonserde.JsonSerDe on new-line delimited json documents.
@@ -51,277 +57,300 @@ import static org.apache.streams.util.schema.FileUtil.*;
  */
 public class StreamsHiveResourceGenerator implements Runnable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsHiveResourceGenerator.class);
-
-    private final static String LS = System.getProperty("line.separator");
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsHiveResourceGenerator.class);
 
-    private StreamsHiveGenerationConfig config;
+  private static final String LS = System.getProperty("line.separator");
 
-    private SchemaStore schemaStore = new SchemaStoreImpl();
+  private StreamsHiveGenerationConfig config;
 
-    private int currentDepth = 0;
+  private SchemaStore schemaStore = new SchemaStoreImpl();
 
-    public static void main(String[] args) {
-        StreamsHiveGenerationConfig config = new StreamsHiveGenerationConfig();
+  private int currentDepth = 0;
 
-        String sourceDirectory = "src/main/jsonschema";
-        String targetDirectory = "target/generated-resources/hive";
+  /**
+   * Run from CLI without Maven
+   *
+   * <p/>
+   * java -jar streams-plugin-hive-jar-with-dependencies.jar StreamsHiveResourceGenerator src/main/jsonschema target/generated-resources
+   *
+   * @param args [sourceDirectory, targetDirectory]
+   * */
+  public static void main(String[] args) {
+    StreamsHiveGenerationConfig config = new StreamsHiveGenerationConfig();
 
-        if( args.length > 0 )
-            sourceDirectory = args[0];
-        if( args.length > 1 )
-            targetDirectory = args[1];
+    String sourceDirectory = "src/main/jsonschema";
+    String targetDirectory = "target/generated-resources/hive";
 
-        config.setSourceDirectory(sourceDirectory);
-        config.setTargetDirectory(targetDirectory);
-
-        StreamsHiveResourceGenerator streamsHiveResourceGenerator = new StreamsHiveResourceGenerator(config);
-        streamsHiveResourceGenerator.run();
+    if ( args.length > 0 ) {
+      sourceDirectory = args[0];
     }
-
-    public StreamsHiveResourceGenerator(StreamsHiveGenerationConfig config) {
-        this.config = config;
+    if ( args.length > 1 ) {
+      targetDirectory = args[1];
     }
 
-    public void run() {
+    config.setSourceDirectory(sourceDirectory);
+    config.setTargetDirectory(targetDirectory);
 
-        checkNotNull(config);
+    StreamsHiveResourceGenerator streamsHiveResourceGenerator = new StreamsHiveResourceGenerator(config);
+    streamsHiveResourceGenerator.run();
+  }
 
-        generate(config);
+  public StreamsHiveResourceGenerator(StreamsHiveGenerationConfig config) {
+    this.config = config;
+  }
 
-    }
+  @Override
+  public void run() {
 
-    public void generate(StreamsHiveGenerationConfig config) {
+    checkNotNull(config);
 
-        LinkedList<File> sourceFiles = new LinkedList<File>();
+    generate(config);
 
-        for (Iterator<URL> sources = config.getSource(); sources.hasNext();) {
-            URL source = sources.next();
-            sourceFiles.add(URLUtil.getFileFromURL(source));
-        }
+  }
 
-        LOGGER.info("Seeded with {} source paths:", sourceFiles.size());
+  /**
+   * run generate using supplied StreamsHiveGenerationConfig.
+   * @param config StreamsHiveGenerationConfig
+   */
+  public void generate(StreamsHiveGenerationConfig config) {
 
-        FileUtil.resolveRecursive((GenerationConfig)config, sourceFiles);
+    LinkedList<File> sourceFiles = new LinkedList<File>();
 
-        LOGGER.info("Resolved {} schema files:", sourceFiles.size());
+    for (Iterator<URL> sources = config.getSource(); sources.hasNext();) {
+      URL source = sources.next();
+      sourceFiles.add(URLUtil.getFileFromURL(source));
+    }
 
-        for (Iterator<File> iterator = sourceFiles.iterator(); iterator.hasNext();) {
-            File item = iterator.next();
-            schemaStore.create(item.toURI());
-        }
+    LOGGER.info("Seeded with {} source paths:", sourceFiles.size());
 
-        LOGGER.info("Identified {} objects:", schemaStore.getSize());
+    FileUtil.resolveRecursive((GenerationConfig)config, sourceFiles);
 
-        for (Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator(); schemaIterator.hasNext(); ) {
-            Schema schema = schemaIterator.next();
-            currentDepth = 0;
-            if( schema.getURI().getScheme().equals("file")) {
-                String inputFile = schema.getURI().getPath();
-                String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
-                for (String sourcePath : config.getSourcePaths()) {
-                    resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
-                }
-                String outputFile = config.getTargetDirectory() + "/" + swapExtension(resourcePath, "json", "hql");
+    LOGGER.info("Resolved {} schema files:", sourceFiles.size());
 
-                LOGGER.info("Processing {}:", resourcePath);
+    for (Iterator<File> iterator = sourceFiles.iterator(); iterator.hasNext();) {
+      File item = iterator.next();
+      schemaStore.create(item.toURI());
+    }
 
-                String resourceId = dropExtension(resourcePath).replace("/", "_");
+    LOGGER.info("Identified {} objects:", schemaStore.getSize());
 
-                String resourceContent = generateResource(schema, resourceId);
+    for (Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator(); schemaIterator.hasNext(); ) {
+      Schema schema = schemaIterator.next();
+      currentDepth = 0;
+      if ( schema.getUri().getScheme().equals("file")) {
+        String inputFile = schema.getUri().getPath();
+        String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
+        for (String sourcePath : config.getSourcePaths()) {
+          resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
+        }
+        String outputFile = config.getTargetDirectory() + "/" + swapExtension(resourcePath, "json", "hql");
 
-                writeFile(outputFile, resourceContent);
+        LOGGER.info("Processing {}:", resourcePath);
 
-                LOGGER.info("Wrote {}:", outputFile);
-            }
-        }
-    }
+        String resourceId = dropExtension(resourcePath).replace("/", "_");
 
-    public String generateResource(Schema schema, String resourceId) {
-        StringBuilder resourceBuilder = new StringBuilder();
-        resourceBuilder.append("CREATE TABLE ");
-        resourceBuilder.append(hqlEscape(resourceId));
-        resourceBuilder.append(LS);
-        resourceBuilder.append("(");
-        resourceBuilder.append(LS);
-        resourceBuilder = appendRootObject(resourceBuilder, schema, resourceId, ' ');
-        resourceBuilder.append(")");
-        resourceBuilder.append(LS);
-        resourceBuilder.append("ROW FORMAT SERDE 'org.openx.data.jsonserde.JsonSerDe'");
-        resourceBuilder.append(LS);
-        resourceBuilder.append("WITH SERDEPROPERTIES (\"ignore.malformed.json\" = \"true\"");
-        resourceBuilder.append(LS);
-        resourceBuilder.append("STORED AS TEXTFILE");
-        resourceBuilder.append(LS);
-        resourceBuilder.append("LOCATION '${hiveconf:path}';");
-        resourceBuilder.append(LS);
-        return resourceBuilder.toString();
-    }
+        String resourceContent = generateResource(schema, resourceId);
 
-    public StringBuilder appendRootObject(StringBuilder builder, Schema schema, String resourceId, Character seperator) {
-        ObjectNode propertiesNode = schemaStore.resolveProperties(schema, null, resourceId);
-        if( propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0) {
-            builder = appendPropertiesNode(builder, schema, propertiesNode, seperator);
-        }
-        return builder;
-    }
+        writeFile(outputFile, resourceContent);
 
-    private StringBuilder appendValueField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
-        // safe to append nothing
-        checkNotNull(builder);
-        builder.append(hqlEscape(fieldId));
-        builder.append(seperator);
-        builder.append(hqlType(fieldType));
-        return builder;
+        LOGGER.info("Wrote {}:", outputFile);
+      }
     }
-
-    public StringBuilder appendArrayItems(StringBuilder builder, Schema schema, String fieldId, ObjectNode itemsNode, Character seperator) {
-        // not safe to append nothing
-        checkNotNull(builder);
-        if( itemsNode == null ) return builder;
-        if( itemsNode.has("type")) {
-            try {
-                FieldType itemType = FieldUtil.determineFieldType(itemsNode);
-                switch( itemType ) {
-                    case OBJECT:
-                        builder = appendArrayObject(builder, schema, fieldId, itemsNode, seperator);
-                        break;
-                    case ARRAY:
-                        ObjectNode subArrayItems = (ObjectNode) itemsNode.get("items");
-                        builder = appendArrayItems(builder, schema, fieldId, subArrayItems, seperator);
-                        break;
-                    default:
-                        builder = appendArrayField(builder, schema, fieldId, itemType, seperator);
-                }
-            } catch (Exception e) {
-                LOGGER.warn("No item type resolvable for {}", fieldId);
-            }
-        }
-        checkNotNull(builder);
-        return builder;
+  }
+
+  /**
+   * generateResource String from schema and resourceId.
+   * @param schema Schema
+   * @param resourceId String
+   * @return CREATE TABLE ...
+   */
+  public String generateResource(Schema schema, String resourceId) {
+    StringBuilder resourceBuilder = new StringBuilder();
+    resourceBuilder.append("CREATE TABLE ");
+    resourceBuilder.append(hqlEscape(resourceId));
+    resourceBuilder.append(LS);
+    resourceBuilder.append("(");
+    resourceBuilder.append(LS);
+    resourceBuilder = appendRootObject(resourceBuilder, schema, resourceId, ' ');
+    resourceBuilder.append(")");
+    resourceBuilder.append(LS);
+    resourceBuilder.append("ROW FORMAT SERDE 'org.openx.data.jsonserde.JsonSerDe'");
+    resourceBuilder.append(LS);
+    resourceBuilder.append("WITH SERDEPROPERTIES (\"ignore.malformed.json\" = \"true\"");
+    resourceBuilder.append(LS);
+    resourceBuilder.append("STORED AS TEXTFILE");
+    resourceBuilder.append(LS);
+    resourceBuilder.append("LOCATION '${hiveconf:path}';");
+    resourceBuilder.append(LS);
+    return resourceBuilder.toString();
+  }
+
+  protected StringBuilder appendRootObject(StringBuilder builder, Schema schema, String resourceId, Character seperator) {
+    ObjectNode propertiesNode = schemaStore.resolveProperties(schema, null, resourceId);
+    if ( propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0) {
+      builder = appendPropertiesNode(builder, schema, propertiesNode, seperator);
     }
-
-    private StringBuilder appendArrayField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
-        // safe to append nothing
-        checkNotNull(builder);
-        checkNotNull(fieldId);
-        builder.append(hqlEscape(fieldId));
-        builder.append(seperator);
-        builder.append("ARRAY<"+hqlType(fieldType)+">");
-        checkNotNull(builder);
-        return builder;
+    return builder;
+  }
+
+  private StringBuilder appendValueField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
+    // safe to append nothing
+    checkNotNull(builder);
+    builder.append(hqlEscape(fieldId));
+    builder.append(seperator);
+    builder.append(hqlType(fieldType));
+    return builder;
+  }
+
+  protected StringBuilder appendArrayItems(StringBuilder builder, Schema schema, String fieldId, ObjectNode itemsNode, Character seperator) {
+    // not safe to append nothing
+    checkNotNull(builder);
+    if ( itemsNode == null ) {
+      return builder;
     }
-
-    private StringBuilder appendArrayObject(StringBuilder builder, Schema schema, String fieldId, ObjectNode fieldNode, Character seperator) {
-        // safe to append nothing
-        checkNotNull(builder);
-        checkNotNull(fieldNode);
-        if( !Strings.isNullOrEmpty(fieldId)) {
-            builder.append(hqlEscape(fieldId));
-            builder.append(seperator);
+    if ( itemsNode.has("type")) {
+      try {
+        FieldType itemType = FieldUtil.determineFieldType(itemsNode);
+        switch ( itemType ) {
+          case OBJECT:
+            builder = appendArrayObject(builder, schema, fieldId, itemsNode, seperator);
+            break;
+          case ARRAY:
+            ObjectNode subArrayItems = (ObjectNode) itemsNode.get("items");
+            builder = appendArrayItems(builder, schema, fieldId, subArrayItems, seperator);
+            break;
+          default:
+            builder = appendArrayField(builder, schema, fieldId, itemType, seperator);
         }
-        builder.append("ARRAY");
-        builder.append(LS);
-        builder.append("<");
-        builder.append(LS);
-        ObjectNode propertiesNode = schemaStore.resolveProperties(schema, fieldNode, fieldId);
-        builder = appendStructField(builder, schema, "", propertiesNode, ':');
-        builder.append(">");
-        checkNotNull(builder);
-        return builder;
+      } catch (Exception ex) {
+        LOGGER.warn("No item type resolvable for {}", fieldId);
+      }
     }
+    checkNotNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendArrayField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
+    // safe to append nothing
+    checkNotNull(builder);
+    checkNotNull(fieldId);
+    builder.append(hqlEscape(fieldId));
+    builder.append(seperator);
+    builder.append("ARRAY<" + hqlType(fieldType) + ">");
+    checkNotNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendArrayObject(StringBuilder builder, Schema schema, String fieldId, ObjectNode fieldNode, Character seperator) {
+    // safe to append nothing
+    checkNotNull(builder);
+    checkNotNull(fieldNode);
+    if ( !Strings.isNullOrEmpty(fieldId)) {
+      builder.append(hqlEscape(fieldId));
+      builder.append(seperator);
+    }
+    builder.append("ARRAY");
+    builder.append(LS);
+    builder.append("<");
+    builder.append(LS);
+    ObjectNode propertiesNode = schemaStore.resolveProperties(schema, fieldNode, fieldId);
+    builder = appendStructField(builder, schema, "", propertiesNode, ':');
+    builder.append(">");
+    checkNotNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendStructField(StringBuilder builder, Schema schema, String fieldId, ObjectNode propertiesNode, Character seperator) {
+    // safe to append nothing
+    checkNotNull(builder);
+    checkNotNull(propertiesNode);
+
+    if ( propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0 ) {
+
+      currentDepth += 1;
+
+      if ( !Strings.isNullOrEmpty(fieldId)) {
+        builder.append(hqlEscape(fieldId));
+        builder.append(seperator);
+      }
+      builder.append("STRUCT");
+      builder.append(LS);
+      builder.append("<");
+      builder.append(LS);
 
-    private StringBuilder appendStructField(StringBuilder builder, Schema schema, String fieldId, ObjectNode propertiesNode, Character seperator) {
-        // safe to append nothing
-        checkNotNull(builder);
-        checkNotNull(propertiesNode);
-
-        if( propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0 ) {
-
-            currentDepth += 1;
-
-            if( !Strings.isNullOrEmpty(fieldId)) {
-                builder.append(hqlEscape(fieldId));
-                builder.append(seperator);
-            }
-            builder.append("STRUCT");
-            builder.append(LS);
-            builder.append("<");
-            builder.append(LS);
-
-            builder = appendPropertiesNode(builder, schema, propertiesNode, ':');
+      builder = appendPropertiesNode(builder, schema, propertiesNode, ':');
 
-            builder.append(">");
-            builder.append(LS);
+      builder.append(">");
+      builder.append(LS);
 
-            currentDepth -= 1;
+      currentDepth -= 1;
 
-        }
-        checkNotNull(builder);
-        return builder;
     }
-
-    private StringBuilder appendPropertiesNode(StringBuilder builder, Schema schema, ObjectNode propertiesNode, Character seperator) {
-        checkNotNull(builder);
-        checkNotNull(propertiesNode);
-        Iterator<Map.Entry<String, JsonNode>> fields = propertiesNode.fields();
-        Joiner joiner = Joiner.on(","+LS).skipNulls();
-        List<String> fieldStrings = Lists.newArrayList();
-        for( ; fields.hasNext(); ) {
-            Map.Entry<String, JsonNode> field = fields.next();
-            String fieldId = field.getKey();
-            if( !config.getExclusions().contains(fieldId) && field.getValue().isObject()) {
-                ObjectNode fieldNode = (ObjectNode) field.getValue();
-                FieldType fieldType = FieldUtil.determineFieldType(fieldNode);
-                if (fieldType != null ) {
-                    switch (fieldType) {
-                        case ARRAY:
-                            ObjectNode itemsNode = (ObjectNode) fieldNode.get("items");
-                            if( currentDepth <= config.getMaxDepth()) {
-                                StringBuilder arrayItemsBuilder = appendArrayItems(new StringBuilder(), schema, fieldId, itemsNode, seperator);
-                                if( !Strings.isNullOrEmpty(arrayItemsBuilder.toString())) {
-                                    fieldStrings.add(arrayItemsBuilder.toString());
-                                }
-                            }
-                            break;
-                        case OBJECT:
-                            ObjectNode childProperties = schemaStore.resolveProperties(schema, fieldNode, fieldId);
-                            if( currentDepth < config.getMaxDepth()) {
-                                StringBuilder structFieldBuilder = appendStructField(new StringBuilder(), schema, fieldId, childProperties, seperator);
-                                if( !Strings.isNullOrEmpty(structFieldBuilder.toString())) {
-                                    fieldStrings.add(structFieldBuilder.toString());
-                                }
-                            }
-                            break;
-                        default:
-                            StringBuilder valueFieldBuilder = appendValueField(new StringBuilder(), schema, fieldId, fieldType, seperator);
-                            if( !Strings.isNullOrEmpty(valueFieldBuilder.toString())) {
-                                fieldStrings.add(valueFieldBuilder.toString());
-                            }
-                    }
+    checkNotNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendPropertiesNode(StringBuilder builder, Schema schema, ObjectNode propertiesNode, Character seperator) {
+    checkNotNull(builder);
+    checkNotNull(propertiesNode);
+    Iterator<Map.Entry<String, JsonNode>> fields = propertiesNode.fields();
+    Joiner joiner = Joiner.on("," + LS).skipNulls();
+    List<String> fieldStrings = Lists.newArrayList();
+    for ( ; fields.hasNext(); ) {
+      Map.Entry<String, JsonNode> field = fields.next();
+      String fieldId = field.getKey();
+      if ( !config.getExclusions().contains(fieldId) && field.getValue().isObject()) {
+        ObjectNode fieldNode = (ObjectNode) field.getValue();
+        FieldType fieldType = FieldUtil.determineFieldType(fieldNode);
+        if (fieldType != null ) {
+          switch (fieldType) {
+            case ARRAY:
+              ObjectNode itemsNode = (ObjectNode) fieldNode.get("items");
+              if ( currentDepth <= config.getMaxDepth()) {
+                StringBuilder arrayItemsBuilder = appendArrayItems(new StringBuilder(), schema, fieldId, itemsNode, seperator);
+                if ( !Strings.isNullOrEmpty(arrayItemsBuilder.toString())) {
+                  fieldStrings.add(arrayItemsBuilder.toString());
                 }
-            }
-        }
-        builder.append(joiner.join(fieldStrings)).append(LS);
-        Preconditions.checkNotNull(builder);
-        return builder;
-    }
-
-    private static String hqlEscape( String fieldId ) {
-        return "`"+fieldId+"`";
-    }
-
-    private static String hqlType( FieldType fieldType ) {
-        switch( fieldType ) {
-            case INTEGER:
-                return "INT";
-            case NUMBER:
-                return "FLOAT";
+              }
+              break;
             case OBJECT:
-                return "STRUCT";
+              ObjectNode childProperties = schemaStore.resolveProperties(schema, fieldNode, fieldId);
+              if ( currentDepth < config.getMaxDepth()) {
+                StringBuilder structFieldBuilder = appendStructField(new StringBuilder(), schema, fieldId, childProperties, seperator);
+                if ( !Strings.isNullOrEmpty(structFieldBuilder.toString())) {
+                  fieldStrings.add(structFieldBuilder.toString());
+                }
+              }
+              break;
             default:
-                return fieldType.name().toUpperCase();
+              StringBuilder valueFieldBuilder = appendValueField(new StringBuilder(), schema, fieldId, fieldType, seperator);
+              if ( !Strings.isNullOrEmpty(valueFieldBuilder.toString())) {
+                fieldStrings.add(valueFieldBuilder.toString());
+              }
+          }
         }
+      }
+    }
+    builder.append(joiner.join(fieldStrings)).append(LS);
+    Preconditions.checkNotNull(builder);
+    return builder;
+  }
+
+  private static String hqlEscape( String fieldId ) {
+    return "`" + fieldId + "`";
+  }
+
+  private static String hqlType( FieldType fieldType ) {
+    switch ( fieldType ) {
+      case INTEGER:
+        return "INT";
+      case NUMBER:
+        return "FLOAT";
+      case OBJECT:
+        return "STRUCT";
+      default:
+        return fieldType.name().toUpperCase();
     }
+  }
 
 }



[10/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/data/DocumentClassifier.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/data/DocumentClassifier.java b/streams-pojo/src/main/java/org/apache/streams/data/DocumentClassifier.java
index bcb14b7..b15722f 100644
--- a/streams-pojo/src/main/java/org/apache/streams/data/DocumentClassifier.java
+++ b/streams-pojo/src/main/java/org/apache/streams/data/DocumentClassifier.java
@@ -27,13 +27,13 @@ import java.util.List;
  */
 public interface DocumentClassifier extends Serializable {
 
-    /**
-     * Assess the structure of the document, and identify whether the provided document is
-     * a structural match for one or more typed forms.
-     *
-     * @param document the document
-     * @return a serializable pojo class this document matches
-     */
-    List<Class> detectClasses(Object document);
+  /**
+   * Assess the structure of the document, and identify whether the provided document is
+   * a structural match for one or more typed forms.
+   *
+   * @param document the document
+   * @return a serializable pojo class this document matches
+   */
+  List<Class> detectClasses(Object document);
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/data/util/ActivityUtil.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/data/util/ActivityUtil.java b/streams-pojo/src/main/java/org/apache/streams/data/util/ActivityUtil.java
index dc10df1..c7e8337 100644
--- a/streams-pojo/src/main/java/org/apache/streams/data/util/ActivityUtil.java
+++ b/streams-pojo/src/main/java/org/apache/streams/data/util/ActivityUtil.java
@@ -18,123 +18,140 @@
 
 package org.apache.streams.data.util;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import java.util.HashMap;
 import java.util.Map;
 
 /**
  * Utility class for managing activities
  *
+ * <p>
  * Deprecated: Use {@link org.apache.streams.pojo.extensions.ExtensionUtil}
  */
 @Deprecated
 public class ActivityUtil {
 
-    private ActivityUtil() {}
-
-    /**
-     * Property on the activity object to use for extensions
-     */
-    public static final String EXTENSION_PROPERTY = "extensions";
-    /**
-     * The number of +1, Like, favorites, etc that the post has received
-     */
-    public static final String LIKES_EXTENSION = "likes";
-    /**
-     * The number of retweets, shares, etc that the post has received
-     */
-    public static final String REBROADCAST_EXTENSION = "rebroadcasts";
-    /**
-     * The language of the post
-     */
-    public static final String LANGUAGE_EXTENSION = "language";
-    /**
-     * Location that the post was made or the actor's residence
-     */
-    public static final String LOCATION_EXTENSION = "location";
-    /**
-     * Country that the post was made
-     */
-    public static final String LOCATION_EXTENSION_COUNTRY = "country";
-    /**
-     * Specific JSON-geo coordinates (long,lat)
-     */
-    public static final String LOCATION_EXTENSION_COORDINATES = "coordinates";
-
-    private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    /**
-     * Creates a standard extension property
-     * @param activity activity to create the property in
-     * @return the Map representing the extensions property
-     */
-    @SuppressWarnings("unchecked")
-    @Deprecated
-    public static Map<String, Object> ensureExtensions(Activity activity) {
-        Map<String, Object> extensions = (Map)activity.getAdditionalProperties().get(EXTENSION_PROPERTY);
-        if(extensions == null) {
-            extensions = new HashMap<>();
-            activity.setAdditionalProperty(EXTENSION_PROPERTY, extensions);
-        }
-        return extensions;
-    }
+  private ActivityUtil() {}
 
-    /**
-     * Gets a formatted ID
-     * @param providerName name of the provider
-     * @param personId ID of the person within the system
-     * @return id:<providerName>:people:<personId>
-     */
-    public static String getPersonId(String providerName, String personId) {
-        return String.format("id:%s:people:%s", providerName, personId);
-    }
+  /**
+   * Property on the activity object to use for extensions.
+   */
+  public static final String EXTENSION_PROPERTY = "extensions";
 
-    /**
-     * Gets a formatted provider ID
-     * @param providerName name of the provider
-     * @return id:providers:<providerName>
-     */
-    public static String getProviderId(String providerName) {
-        return String.format("id:providers:%s", providerName);
-    }
+  /**
+   * The number of +1, Like, favorites, etc that the post has received.
+   */
+  public static final String LIKES_EXTENSION = "likes";
 
-    /**
-     * Gets a formatted object ID
-     * @param provider name of the provider
-     * @param objectType type of the object
-     * @param objectId the ID of the object
-     * @return id:<provider>:<objectType>s:<objectId>
-     */
-    public static String getObjectId(String provider, String objectType, String objectId) {
-        return String.format("id:%s:%ss:%s", provider, objectType, objectId);
-    }
+  /**
+   * The number of retweets, shares, etc that the post has received.
+   */
+  public static final String REBROADCAST_EXTENSION = "rebroadcasts";
 
-    /**
-     * Gets a formatted activity ID
-     * @param providerName name of the provider
-     * @param activityId ID of the provider
-     * @return id:<providerName>:activities:<activityId>
-     */
-    public static String getActivityId(String providerName, String activityId) {
-        return String.format("id:%s:activities:%s", providerName, activityId);
-    }
+  /**
+   * The language of the post.
+   */
+  public static final String LANGUAGE_EXTENSION = "language";
 
-    public static boolean isValid(Activity activity) {
-        return activity != null
-                && activity.getId() != null
-                && activity.getVerb() != null
-                && activity.getProvider() != null
-                && activity.getProvider().getId() != null;
-    }
+  /**
+   * Location that the post was made or the actor's residence.
+   */
+  public static final String LOCATION_EXTENSION = "location";
 
-    public static boolean isValid(ActivityObject activityObject) {
-        return activityObject != null
-            && activityObject.getId() != null
-            && activityObject.getObjectType() != null;
+  /**
+   * Country that the post was made.
+   */
+  public static final String LOCATION_EXTENSION_COUNTRY = "country";
+
+  /**
+   * Specific JSON-geo coordinates (long,lat).
+   */
+  public static final String LOCATION_EXTENSION_COORDINATES = "coordinates";
+
+  private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+  /**
+   * Creates a standard extension property.
+   * @param activity activity to create the property in
+   * @return the Map representing the extensions property
+   */
+  @SuppressWarnings("unchecked")
+  @Deprecated
+  public static Map<String, Object> ensureExtensions(Activity activity) {
+    Map<String, Object> extensions = (Map)activity.getAdditionalProperties().get(EXTENSION_PROPERTY);
+    if (extensions == null) {
+      extensions = new HashMap<>();
+      activity.setAdditionalProperty(EXTENSION_PROPERTY, extensions);
     }
+    return extensions;
+  }
+
+  /**
+   * Gets a formatted ID.
+   * @param providerName name of the provider
+   * @param personId ID of the person within the system
+   * @return id:{providerName}:people:{personId}
+   */
+  public static String getPersonId(String providerName, String personId) {
+    return String.format("id:%s:people:%s", providerName, personId);
+  }
+
+  /**
+   * Gets a formatted provider ID.
+   * @param providerName name of the provider
+   * @return id:providers:{providerName}
+   */
+  public static String getProviderId(String providerName) {
+    return String.format("id:providers:%s", providerName);
+  }
+
+  /**
+   * Gets a formatted object ID.
+   * @param provider name of the provider
+   * @param objectType type of the object
+   * @param objectId the ID of the object
+   * @return id:{provider}:{objectType}s:{objectId}
+   */
+  public static String getObjectId(String provider, String objectType, String objectId) {
+    return String.format("id:%s:%ss:%s", provider, objectType, objectId);
+  }
+
+  /**
+   * Gets a formatted activity ID.
+   * @param providerName name of the provider
+   * @param activityId ID of the provider
+   * @return id:{providerName}:activities:{activityId}
+   */
+  public static String getActivityId(String providerName, String activityId) {
+    return String.format("id:%s:activities:%s", providerName, activityId);
+  }
+
+  /**
+   * Check validity of Activity.
+   * @param activity Activity
+   * @return isValid
+   */
+  public static boolean isValid(Activity activity) {
+    return activity != null
+        && activity.getId() != null
+        && activity.getVerb() != null
+        && activity.getProvider() != null
+        && activity.getProvider().getId() != null;
+  }
+
+  /**
+   * Check validity of ActivityObject.
+   * @param activityObject ActivityObject
+   * @return isValid
+   */
+  public static boolean isValid(ActivityObject activityObject) {
+    return activityObject != null
+        && activityObject.getId() != null
+        && activityObject.getObjectType() != null;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/data/util/JsonUtil.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/data/util/JsonUtil.java b/streams-pojo/src/main/java/org/apache/streams/data/util/JsonUtil.java
deleted file mode 100644
index 6ab2800..0000000
--- a/streams-pojo/src/main/java/org/apache/streams/data/util/JsonUtil.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.data.util;
-
-import com.fasterxml.jackson.core.JsonFactory;
-import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ArrayNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Joiner;
-import com.google.common.base.Splitter;
-import com.google.common.collect.Lists;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * JSON utilities
- */
-public class JsonUtil {
-
-    private JsonUtil() {}
-
-    private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-    private static JsonFactory factory = mapper.getFactory();
-
-    public static JsonNode jsonToJsonNode(String json) {
-        JsonNode node;
-        try {
-            JsonParser jp = factory.createJsonParser(json);
-            node = mapper.readTree(jp);
-        } catch (IOException e) {
-            throw new RuntimeException("IO exception while reading JSON", e);
-        }
-        return node;
-    }
-
-    public static String jsonNodeToJson(JsonNode node) {
-        try {
-            return mapper.writeValueAsString(node);
-        } catch (JsonProcessingException e) {
-            throw new RuntimeException("IO exception while writing JSON", e);
-        }
-    }
-
-    public static <T> T jsonToObject(String json, Class<T> clazz) {
-        try {
-            return mapper.readValue(json, clazz);
-        } catch (IOException e) {
-            throw new RuntimeException("Could not map to object");
-        }
-    }
-
-    public static <T> T jsonNodeToObject(JsonNode node, Class<T> clazz) {
-        return mapper.convertValue(node, clazz);
-    }
-
-    public static <T> JsonNode objectToJsonNode(T obj) {
-        return mapper.valueToTree(obj);
-    }
-
-    public static <T> List<T> jsoNodeToList(JsonNode node, Class<T> clazz) {
-        return mapper.convertValue(node, new TypeReference<List<T>>() {});
-    }
-
-    public static <T> String objectToJson(T object) {
-        try {
-            return mapper.writeValueAsString(object);
-        } catch (IOException e) {
-            throw new RuntimeException("Could not map to object");
-        }
-    }
-
-    public static <T> T getObjFromFile(String filePath, Class<T> clazz) {
-        return jsonNodeToObject(getFromFile(filePath), clazz);
-    }
-
-    public static JsonNode getFromFile(String filePath) {
-        JsonFactory factory = mapper.getFactory(); // since 2.1 use mapper.getFactory() instead
-
-        JsonNode node = null;
-        try {
-            InputStream stream = getStreamForLocation(filePath);
-            JsonParser jp = factory.createParser(stream);
-            node = mapper.readTree(jp);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
-        return node;
-    }
-
-    private static InputStream getStreamForLocation(String filePath) throws FileNotFoundException {
-        InputStream stream = null;
-        if(filePath.startsWith("file:///")) {
-            stream = new FileInputStream(filePath.replace("file:///", ""));
-        } else if(filePath.startsWith("file:") || filePath.startsWith("/")) {
-            stream = new FileInputStream(filePath.replace("file:", ""));
-        } else {
-            //Assume classpath
-            stream = JsonUtil.class.getClassLoader().getResourceAsStream(filePath.replace("classpath:", ""));
-        }
-
-        return stream;
-    }
-
-    /**
-     * Creates an empty array if missing
-     * @param node object to create the array within
-     * @param field location to create the array
-     * @return the Map representing the extensions property
-     */
-    public static ArrayNode ensureArray(ObjectNode node, String field) {
-        String[] path = Lists.newArrayList(Splitter.on('.').split(field)).toArray(new String[0]);
-        ObjectNode current = node;
-        ArrayNode result = null;
-        for( int i = 0; i < path.length; i++) {
-            current = ensureObject((ObjectNode) node.get(path[i]), path[i]);
-        }
-        if (current.get(field) == null)
-            current.put(field, mapper.createArrayNode());
-        result = (ArrayNode) node.get(field);
-        return result;
-    }
-
-    /**
-     * Creates an empty array if missing
-     * @param node objectnode to create the object within
-     * @param field location to create the object
-     * @return the Map representing the extensions property
-     */
-    public static ObjectNode ensureObject(ObjectNode node, String field) {
-        String[] path = Lists.newArrayList(Splitter.on('.').split(field)).toArray(new String[0]);
-        ObjectNode current = node;
-        ObjectNode result = null;
-        for( int i = 0; i < path.length; i++) {
-            if (node.get(field) == null)
-                node.put(field, mapper.createObjectNode());
-            current = (ObjectNode) node.get(field);
-        }
-        result = ensureObject((ObjectNode) node.get(path[path.length]), Joiner.on('.').join(Arrays.copyOfRange(path, 1, path.length)));
-        return result;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/data/util/RFC3339Utils.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/data/util/RFC3339Utils.java b/streams-pojo/src/main/java/org/apache/streams/data/util/RFC3339Utils.java
index bd32668..18ae064 100644
--- a/streams-pojo/src/main/java/org/apache/streams/data/util/RFC3339Utils.java
+++ b/streams-pojo/src/main/java/org/apache/streams/data/util/RFC3339Utils.java
@@ -15,6 +15,7 @@
 * specific language governing permissions and limitations
 * under the License.
 */
+
 package org.apache.streams.data.util;
 
 import org.joda.time.DateTime;
@@ -33,172 +34,176 @@ import java.util.regex.Pattern;
  */
 public class RFC3339Utils {
 
-    private static final RFC3339Utils INSTANCE = new RFC3339Utils();
-
-    public static RFC3339Utils getInstance(){
-        return INSTANCE;
-    }
-
-    private static final String BASE = "^[0-9]{4}\\-[0-9]{2}\\-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}";
-    private static final String TZ = "[+-][0-9]{2}:?[0-9]{2}$";
-    private static final String SUB_SECOND = "\\.([0-9]*)";
-    private static final String UTC = "Z$";
-
-
-    private static final Pattern MILLIS = Pattern.compile("^[0-9]*$");
-    private static final Pattern UTC_STANDARD = Pattern.compile(BASE + UTC);
-    private static final Pattern UTC_SUB_SECOND = Pattern.compile(BASE + SUB_SECOND + UTC);
-    private static final Pattern LOCAL_STANDARD = Pattern.compile(BASE + TZ);
-    private static final Pattern LOCAL_SUB_SECOND = Pattern.compile(BASE + SUB_SECOND + TZ);
-
-    private static final String BASE_FMT = "yyyy-MM-dd'T'HH:mm:ss";
-    public static final DateTimeFormatter UTC_STANDARD_FMT = DateTimeFormat.forPattern(BASE_FMT + "'Z'").withZoneUTC();
-    public static final DateTimeFormatter UTC_SUB_SECOND_FMT = DateTimeFormat.forPattern(BASE_FMT + ".SSS'Z'").withZoneUTC();
-    public static final DateTimeFormatter LOCAL_STANDARD_FMT = DateTimeFormat.forPattern(BASE_FMT + "Z").withZoneUTC();
-    public static final DateTimeFormatter LOCAL_SUB_SECOND_FMT = DateTimeFormat.forPattern(BASE_FMT + ".SSSZ").withZoneUTC();
-
-    /**
-     * Contains various formats.  All formats should be of international standards when comes to the ordering of the
-     * days and month.
-     */
-    private static final  DateTimeFormatter DEFAULT_FORMATTER;
-    /**
-     * Contains alternative formats that will succeed after failures from the DEFAULT_FORMATTER.
-     * i.e. 4/24/2014 will throw an exception on the default formatter because it will assume international date standards
-     * However, the date will parse in the ALT_FORMATTER because it contains the US format of MM/dd/yyyy.
-     */
-    private static final DateTimeFormatter ALT_FORMATTER;
-
-    static {
-        DateTimeParser[] parsers = new DateTimeParser[]{
-                DateTimeFormat.forPattern("EEE MMM dd HH:mm:ss Z yyyy").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("EEE, dd MMM yyyy HH:mm:ss Z").getParser(),
-                DateTimeFormat.forPattern("dd MMMM yyyy HH:mm:ss").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("yyyyMMdd").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("dd-MM-yyyy").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("yyyy-MM-dd").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("yyyy/MM/dd").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("dd MMM yyyy").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("dd MMMM yyyy").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("yyyyMMddHHmm").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("yyyyMMdd HHmm").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("dd-MM-yyyy HH:mm").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("yyyy-MM-dd HH:mm").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("yyyy/MM/dd HH:mm").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("dd MMM yyyy HH:mm").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("dd MMMM yyyy HH:mm").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("yyyyMMddHHmmss").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("yyyyMMdd HHmmss").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("dd-MM-yyyy HH:mm:ss").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("yyyy/MM/dd HH:mm:ss").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("dd MMM yyyy HH:mm:ss").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("HH:mm:ss yyyy/MM/dd").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("HH:mm:ss MM/dd/yyyy").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("HH:mm:ss yyyy-MM-dd").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("HH:mm:ss MM-dd-yyyy").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("dd/MM/yyyy HH:mm:ss").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("dd/MM/yyyy HH:mm").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("dd/MM/yyyy").withZoneUTC().getParser(),
-                UTC_STANDARD_FMT.getParser(),
-                UTC_SUB_SECOND_FMT.getParser(),
-                LOCAL_STANDARD_FMT.getParser(),
-                LOCAL_SUB_SECOND_FMT.getParser()
-        };
-        DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
-        builder.append(null, parsers);
-        DEFAULT_FORMATTER = builder.toFormatter().withZoneUTC();
-
-        DateTimeParser[] altParsers = new DateTimeParser[] {
-                DateTimeFormat.forPattern("MM-dd-yyyy HH:mm:ss").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("MM/dd/yyyy HH:mm:ss").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("MM/dd/yyyy HH:mm").withZoneUTC().getParser(),
-                DateTimeFormat.forPattern("MM/dd/yyyy").withZoneUTC().getParser(),
-        };
-        builder = new DateTimeFormatterBuilder();
-        builder.append(null, altParsers);
-        ALT_FORMATTER = builder.toFormatter().withZoneUTC();
+  private static final RFC3339Utils INSTANCE = new RFC3339Utils();
+
+  public static RFC3339Utils getInstance() {
+    return INSTANCE;
+  }
+
+  private static final String BASE = "^[0-9]{4}\\-[0-9]{2}\\-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}";
+  private static final String TZ = "[+-][0-9]{2}:?[0-9]{2}$";
+  private static final String SUB_SECOND = "\\.([0-9]*)";
+  private static final String UTC = "Z$";
+
+
+  private static final Pattern MILLIS = Pattern.compile("^[0-9]*$");
+  private static final Pattern UTC_STANDARD = Pattern.compile(BASE + UTC);
+  private static final Pattern UTC_SUB_SECOND = Pattern.compile(BASE + SUB_SECOND + UTC);
+  private static final Pattern LOCAL_STANDARD = Pattern.compile(BASE + TZ);
+  private static final Pattern LOCAL_SUB_SECOND = Pattern.compile(BASE + SUB_SECOND + TZ);
+
+  private static final String BASE_FMT = "yyyy-MM-dd'T'HH:mm:ss";
+  public static final DateTimeFormatter UTC_STANDARD_FMT = DateTimeFormat.forPattern(BASE_FMT + "'Z'").withZoneUTC();
+  public static final DateTimeFormatter UTC_SUB_SECOND_FMT = DateTimeFormat.forPattern(BASE_FMT + ".SSS'Z'").withZoneUTC();
+  public static final DateTimeFormatter LOCAL_STANDARD_FMT = DateTimeFormat.forPattern(BASE_FMT + "Z").withZoneUTC();
+  public static final DateTimeFormatter LOCAL_SUB_SECOND_FMT = DateTimeFormat.forPattern(BASE_FMT + ".SSSZ").withZoneUTC();
+
+  /**
+   * Contains various formats.  All formats should be of international standards when it comes to the ordering of the
+   * day and month.
+   */
+  private static final  DateTimeFormatter DEFAULT_FORMATTER;
+  /**
+   * Contains alternative formats that will succeed after failures from the DEFAULT_FORMATTER.
+   * i.e. 4/24/2014 will throw an exception on the default formatter because it will assume international date standards.
+   * However, the date will parse in the ALT_FORMATTER because it contains the US format of MM/dd/yyyy.
+   */
+  private static final DateTimeFormatter ALT_FORMATTER;
+
+  static {
+    DateTimeParser[] parsers = new DateTimeParser[]{
+        DateTimeFormat.forPattern("EEE MMM dd HH:mm:ss Z yyyy").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("EEE, dd MMM yyyy HH:mm:ss Z").getParser(),
+        DateTimeFormat.forPattern("dd MMMM yyyy HH:mm:ss").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("yyyyMMdd").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("dd-MM-yyyy").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("yyyy-MM-dd").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("yyyy/MM/dd").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("dd MMM yyyy").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("dd MMMM yyyy").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("yyyyMMddHHmm").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("yyyyMMdd HHmm").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("dd-MM-yyyy HH:mm").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("yyyy-MM-dd HH:mm").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("yyyy/MM/dd HH:mm").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("dd MMM yyyy HH:mm").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("dd MMMM yyyy HH:mm").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("yyyyMMddHHmmss").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("yyyyMMdd HHmmss").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("dd-MM-yyyy HH:mm:ss").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("yyyy/MM/dd HH:mm:ss").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("dd MMM yyyy HH:mm:ss").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("HH:mm:ss yyyy/MM/dd").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("HH:mm:ss MM/dd/yyyy").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("HH:mm:ss yyyy-MM-dd").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("HH:mm:ss MM-dd-yyyy").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("dd/MM/yyyy HH:mm:ss").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("dd/MM/yyyy HH:mm").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("dd/MM/yyyy").withZoneUTC().getParser(),
+        UTC_STANDARD_FMT.getParser(),
+        UTC_SUB_SECOND_FMT.getParser(),
+        LOCAL_STANDARD_FMT.getParser(),
+        LOCAL_SUB_SECOND_FMT.getParser()
+    };
+    DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
+    builder.append(null, parsers);
+    DEFAULT_FORMATTER = builder.toFormatter().withZoneUTC();
+
+    DateTimeParser[] altParsers = new DateTimeParser[] {
+        DateTimeFormat.forPattern("MM-dd-yyyy HH:mm:ss").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("MM/dd/yyyy HH:mm:ss").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("MM/dd/yyyy HH:mm").withZoneUTC().getParser(),
+        DateTimeFormat.forPattern("MM/dd/yyyy").withZoneUTC().getParser(),
+    };
+    builder = new DateTimeFormatterBuilder();
+    builder.append(null, altParsers);
+    ALT_FORMATTER = builder.toFormatter().withZoneUTC();
+  }
+
+  private RFC3339Utils() {}
+
+  /**
+   * parse String to DateTime
+   * @param toParse DateTime as UTC String
+   * @return DateTime
+   */
+  public static DateTime parseUTC(String toParse) {
+    if (MILLIS.matcher(toParse).matches()) {
+      return new DateTime(Long.valueOf(toParse), DateTimeZone.UTC);
     }
-
-
-    private RFC3339Utils() {}
-
-    public static DateTime parseUTC(String toParse) {
-        if(MILLIS.matcher(toParse).matches()) {
-            return new DateTime(Long.valueOf(toParse), DateTimeZone.UTC);
-        }
-        if(UTC_STANDARD.matcher(toParse).matches()) {
-            return parseUTC(UTC_STANDARD_FMT, toParse);
-        }
-        Matcher utc = UTC_SUB_SECOND.matcher(toParse);
-        if(utc.matches()) {
-            return parseUTC(getSubSecondFormat(utc.group(1), "'Z'"), toParse);
-        }
-        if(LOCAL_STANDARD.matcher(toParse).matches()) {
-            return parseUTC(LOCAL_STANDARD_FMT, toParse);
-        }
-        Matcher local = LOCAL_SUB_SECOND.matcher(toParse);
-        if(local.matches()) {
-            return parseUTC(getSubSecondFormat(local.group(1), "Z"), toParse);
-        }
-        throw new IllegalArgumentException(String.format("Failed to parse date %s. Ensure format is RFC3339 Compliant", toParse));
+    if (UTC_STANDARD.matcher(toParse).matches()) {
+      return parseUTC(UTC_STANDARD_FMT, toParse);
     }
-
-    public static String format(DateTime toFormat) {
-        return UTC_SUB_SECOND_FMT.print(toFormat.getMillis());
+    Matcher utc = UTC_SUB_SECOND.matcher(toParse);
+    if (utc.matches()) {
+      return parseUTC(getSubSecondFormat(utc.group(1), "'Z'"), toParse);
     }
-
-    public static String format(DateTime toFormat, TimeZone tz) {
-        return LOCAL_SUB_SECOND_FMT.withZone(DateTimeZone.forTimeZone(tz)).print(toFormat.getMillis());
+    if (LOCAL_STANDARD.matcher(toParse).matches()) {
+      return parseUTC(LOCAL_STANDARD_FMT, toParse);
     }
-
-    /**
-     * Parses arbitrarily formatted Strings representing dates or dates and times to a {@link org.joda.time.DateTime}
-     * objects.  It first attempts parse with international standards, assuming the dates are either dd MM yyyy or
-     * yyyy MM dd.  If that fails it will try American formats where the month precedes the days of the month.
-     * @param dateString abitrarily formatted date or date and time string
-     * @return {@link org.joda.time.DateTime} representation of the dateString
-     */
-    public static DateTime parseToUTC(String dateString) {
-        if(MILLIS.matcher(dateString).find()) {
-            return new DateTime(Long.parseLong(dateString));
-        }
-        try {
-            return DEFAULT_FORMATTER.parseDateTime(dateString);
-        } catch (Exception e) {
-            return ALT_FORMATTER.parseDateTime(dateString);
-        }
+    Matcher local = LOCAL_SUB_SECOND.matcher(toParse);
+    if (local.matches()) {
+      return parseUTC(getSubSecondFormat(local.group(1), "Z"), toParse);
     }
-
-    /**
-     * Formats an arbitrarily formatted into RFC3339 Specifications.
-     * @param dateString date string to be formatted
-     * @return RFC3339 compliant date string
-     */
-    public static String format(String dateString) {
-        return format(parseToUTC(dateString));
+    throw new IllegalArgumentException(String.format("Failed to parse date %s. Ensure format is RFC3339 Compliant", toParse));
+  }
+
+  private static DateTime parseUTC(DateTimeFormatter formatter, String toParse) {
+    return formatter.parseDateTime(toParse);
+  }
+
+  /**
+   * Parses arbitrarily formatted Strings representing dates or dates and times to {@link org.joda.time.DateTime}
+   * objects.  It first attempts to parse with international standards, assuming the dates are either dd MM yyyy or
+   * yyyy MM dd.  If that fails it will try American formats where the month precedes the days of the month.
+   * @param dateString arbitrarily formatted date or date and time string
+   * @return {@link org.joda.time.DateTime} representation of the dateString
+   */
+  public static DateTime parseToUTC(String dateString) {
+    if (MILLIS.matcher(dateString).find()) {
+      return new DateTime(Long.parseLong(dateString));
     }
-
-    private static DateTime parseUTC(DateTimeFormatter formatter, String toParse) {
-        return formatter.parseDateTime(toParse);
+    try {
+      return DEFAULT_FORMATTER.parseDateTime(dateString);
+    } catch (Exception ex) {
+      return ALT_FORMATTER.parseDateTime(dateString);
     }
-
-    private static DateTimeFormatter getSubSecondFormat(String sub, String suffix) {
-        DateTimeFormatter result;
-        //Since RFC3339 allows for any number of sub-second notations, we need to flexibly support more or less than 3
-        //digits; however, if it is exactly 3, just use the standards.
-        if(sub.length() == 3) {
-            result = suffix.equals("Z") ? LOCAL_SUB_SECOND_FMT : UTC_SUB_SECOND_FMT;
-        } else {
-            StringBuilder pattern = new StringBuilder();
-            pattern.append(BASE_FMT);
-            pattern.append(".");
-            for (int i = 0; i < sub.length(); i++) {
-                pattern.append("S");
-            }
-            pattern.append(suffix);
-            result = DateTimeFormat.forPattern(pattern.toString()).withZoneUTC();
-        }
-        return result;
+  }
+
+  /**
+   * Formats an arbitrarily formatted date string into RFC3339 specifications.
+   * @param dateString date string to be formatted
+   * @return RFC3339 compliant date string
+   */
+  public static String format(String dateString) {
+    return format(parseToUTC(dateString));
+  }
+
+  public static String format(DateTime toFormat) {
+    return UTC_SUB_SECOND_FMT.print(toFormat.getMillis());
+  }
+
+  public static String format(DateTime toFormat, TimeZone tz) {
+    return LOCAL_SUB_SECOND_FMT.withZone(DateTimeZone.forTimeZone(tz)).print(toFormat.getMillis());
+  }
+
+  private static DateTimeFormatter getSubSecondFormat(String sub, String suffix) {
+    DateTimeFormatter result;
+    //Since RFC3339 allows for any number of sub-second notations, we need to flexibly support more or less than 3
+    //digits; however, if it is exactly 3, just use the standards.
+    if (sub.length() == 3) {
+      result = suffix.equals("Z") ? LOCAL_SUB_SECOND_FMT : UTC_SUB_SECOND_FMT;
+    } else {
+      StringBuilder pattern = new StringBuilder();
+      pattern.append(BASE_FMT);
+      pattern.append(".");
+      for (int i = 0; i < sub.length(); i++) {
+        pattern.append("S");
+      }
+      pattern.append(suffix);
+      result = DateTimeFormat.forPattern(pattern.toString()).withZoneUTC();
     }
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivityConversionException.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivityConversionException.java b/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivityConversionException.java
index a03ec87..96c4bb1 100644
--- a/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivityConversionException.java
+++ b/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivityConversionException.java
@@ -19,24 +19,24 @@
 package org.apache.streams.exceptions;
 
 /**
- *  ActivityConversionException is a typed exception appropriate when a valid Activity
- *  cannot be created from a given document.
+ * ActivityConversionException is a typed exception appropriate when a valid Activity
+ * cannot be created from a given document.
  */
 public class ActivityConversionException extends Exception {
 
-    public ActivityConversionException() {
-    }
+  public ActivityConversionException() {
+  }
 
-    public ActivityConversionException(String message) {
-        super(message);
-    }
+  public ActivityConversionException(String message) {
+    super(message);
+  }
 
-    public ActivityConversionException(Throwable cause) {
-        super(cause);
-    }
+  public ActivityConversionException(Throwable cause) {
+    super(cause);
+  }
 
-    public ActivityConversionException(String message, Throwable cause) {
-        super(message, cause);
-    }
+  public ActivityConversionException(String message, Throwable cause) {
+    super(message, cause);
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivityDeserializerException.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivityDeserializerException.java b/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivityDeserializerException.java
deleted file mode 100644
index 70901d9..0000000
--- a/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivityDeserializerException.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.exceptions;
-
-/**
- * Created by sblackmon on 3/25/14.
- */
-public class ActivityDeserializerException extends Exception {
-
-    public ActivityDeserializerException() {
-        // TODO Auto-generated constructor stub
-    }
-
-    public ActivityDeserializerException(String message) {
-        super(message);
-        // TODO Auto-generated constructor stub
-    }
-
-    public ActivityDeserializerException(Throwable cause) {
-        super(cause);
-        // TODO Auto-generated constructor stub
-    }
-
-    public ActivityDeserializerException(String message, Throwable cause) {
-        super(message, cause);
-        // TODO Auto-generated constructor stub
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivitySerializerException.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivitySerializerException.java b/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivitySerializerException.java
index e58e2da..85a3b8f 100644
--- a/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivitySerializerException.java
+++ b/streams-pojo/src/main/java/org/apache/streams/exceptions/ActivitySerializerException.java
@@ -19,27 +19,28 @@
 package org.apache.streams.exceptions;
 
 /**
- * Created by sblackmon on 3/25/14.
+ * ActivitySerializerException is a typed exception appropriate when a valid Activity
+ * cannot be created from a given document.
  */
 public class ActivitySerializerException extends Exception {
 
-    public ActivitySerializerException() {
-        // TODO Auto-generated constructor stub
-    }
+  public ActivitySerializerException() {
+    // TODO Auto-generated constructor stub
+  }
 
-    public ActivitySerializerException(String message) {
-        super(message);
-        // TODO Auto-generated constructor stub
-    }
+  public ActivitySerializerException(String message) {
+    super(message);
+    // TODO Auto-generated constructor stub
+  }
 
-    public ActivitySerializerException(Throwable cause) {
-        super(cause);
-        // TODO Auto-generated constructor stub
-    }
+  public ActivitySerializerException(Throwable cause) {
+    super(cause);
+    // TODO Auto-generated constructor stub
+  }
 
-    public ActivitySerializerException(String message, Throwable cause) {
-        super(message, cause);
-        // TODO Auto-generated constructor stub
-    }
+  public ActivitySerializerException(String message, Throwable cause) {
+    super(message, cause);
+    // TODO Auto-generated constructor stub
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeDeserializer.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeDeserializer.java b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeDeserializer.java
index 43813d2..d64fd53 100644
--- a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeDeserializer.java
+++ b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeDeserializer.java
@@ -18,11 +18,12 @@
 
 package org.apache.streams.jackson;
 
+import org.apache.streams.data.util.RFC3339Utils;
+
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.databind.DeserializationContext;
 import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
 import com.google.common.collect.Lists;
-import org.apache.streams.data.util.RFC3339Utils;
 import org.joda.time.DateTime;
 import org.joda.time.format.DateTimeFormat;
 import org.joda.time.format.DateTimeFormatter;
@@ -38,42 +39,43 @@ import java.util.List;
  * StreamsDateTimeDeserializer is a supporting class for
  * @see {@link org.apache.streams.jackson.StreamsJacksonMapper}
  *
+ * <p/>
  * Converting date-time strings other than RFC3339 to joda DateTime objects requires
  * additional formats to be provided when instantiating StreamsJacksonMapper.
  */
 public class StreamsDateTimeDeserializer extends StdDeserializer<DateTime> implements Serializable {
 
-    List<DateTimeFormatter> formatters = Lists.newArrayList();
+  List<DateTimeFormatter> formatters = Lists.newArrayList();
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsDateTimeDeserializer.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsDateTimeDeserializer.class);
 
-    protected StreamsDateTimeDeserializer(Class<DateTime> dateTimeClass) {
-        super(dateTimeClass);
-    }
+  protected StreamsDateTimeDeserializer(Class<DateTime> dateTimeClass) {
+    super(dateTimeClass);
+  }
 
-    protected StreamsDateTimeDeserializer(Class<DateTime> dateTimeClass, List<String> formats) {
-        super(dateTimeClass);
-        for( String format : formats ) {
-            try {
-                formatters.add(DateTimeFormat.forPattern(format));
-            } catch (Exception e) {
-                LOGGER.warn("Exception parsing format " + format);
-            }
-        }
+  protected StreamsDateTimeDeserializer(Class<DateTime> dateTimeClass, List<String> formats) {
+    super(dateTimeClass);
+    for ( String format : formats ) {
+      try {
+        formatters.add(DateTimeFormat.forPattern(format));
+      } catch (Exception ex) {
+        LOGGER.warn("Exception parsing format " + format);
+      }
     }
+  }
 
-    /**
-     * Applies each additional format in turn, until it can provide a non-null DateTime
-     */
-    @Override
-    public DateTime deserialize(JsonParser jpar, DeserializationContext context) throws IOException {
+  /**
+   * Applies each additional format in turn, until it can provide a non-null DateTime
+   */
+  @Override
+  public DateTime deserialize(JsonParser jpar, DeserializationContext context) throws IOException {
 
-        DateTime result = RFC3339Utils.parseToUTC(jpar.getValueAsString());
-        Iterator<DateTimeFormatter> iterator = formatters.iterator();
-        while( result == null && iterator.hasNext()) {
-            DateTimeFormatter formatter = iterator.next();
-            result = formatter.parseDateTime(jpar.getValueAsString());
-        }
-        return result;
+    DateTime result = RFC3339Utils.parseToUTC(jpar.getValueAsString());
+    Iterator<DateTimeFormatter> iterator = formatters.iterator();
+    while ( result == null && iterator.hasNext()) {
+      DateTimeFormatter formatter = iterator.next();
+      result = formatter.parseDateTime(jpar.getValueAsString());
     }
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeFormat.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeFormat.java b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeFormat.java
index e9cab58..a3cc936 100644
--- a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeFormat.java
+++ b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeFormat.java
@@ -19,12 +19,13 @@
 package org.apache.streams.jackson;
 
 /**
- * Supplies a custom date-time format to StreamsJacksonModule
+ * Supplies a custom date-time format to StreamsJacksonModule.
  *
+ * <p/>
  * Implementations must have a no-argument constructor
  */
 public interface StreamsDateTimeFormat {
 
-    public String getFormat();
+  public String getFormat();
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeSerializer.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeSerializer.java b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeSerializer.java
index fff314d..0bae22f 100644
--- a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeSerializer.java
+++ b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsDateTimeSerializer.java
@@ -18,10 +18,11 @@
 
 package org.apache.streams.jackson;
 
+import org.apache.streams.data.util.RFC3339Utils;
+
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.databind.SerializerProvider;
 import com.fasterxml.jackson.databind.ser.std.StdSerializer;
-import org.apache.streams.data.util.RFC3339Utils;
 import org.joda.time.DateTime;
 
 import java.io.IOException;
@@ -29,16 +30,16 @@ import java.io.Serializable;
 
 /**
  * StreamsDateTimeSerializer is a supporting class for
- * @see {@link org.apache.streams.jackson.StreamsJacksonMapper}
+ * @see {@link org.apache.streams.jackson.StreamsJacksonMapper}.
  */
 public class StreamsDateTimeSerializer extends StdSerializer<DateTime> implements Serializable {
 
-    protected StreamsDateTimeSerializer(Class<DateTime> dateTimeClass) {
-        super(dateTimeClass);
-    }
+  protected StreamsDateTimeSerializer(Class<DateTime> dateTimeClass) {
+    super(dateTimeClass);
+  }
 
-    @Override
-    public void serialize(DateTime value, JsonGenerator jgen, SerializerProvider provider) throws IOException {
-        jgen.writeString(RFC3339Utils.getInstance().format(value));
-    }
+  @Override
+  public void serialize(DateTime value, JsonGenerator jgen, SerializerProvider provider) throws IOException {
+    jgen.writeString(RFC3339Utils.getInstance().format(value));
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsJacksonMapper.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsJacksonMapper.java b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsJacksonMapper.java
index 2492b2f..6df9f85 100644
--- a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsJacksonMapper.java
+++ b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsJacksonMapper.java
@@ -18,6 +18,8 @@
 
 package org.apache.streams.jackson;
 
+import org.apache.streams.pojo.StreamsJacksonMapperConfiguration;
+
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.PropertyAccessor;
@@ -28,7 +30,6 @@ import com.fasterxml.jackson.databind.SerializationFeature;
 import com.fasterxml.jackson.module.scala.DefaultScalaModule;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import org.apache.streams.pojo.StreamsJacksonMapperConfiguration;
 
 import java.util.List;
 import java.util.Map;
@@ -36,99 +37,128 @@ import java.util.Map;
 /**
  * StreamsJacksonMapper is the recommended interface to jackson for any streams component.
  *
+ * <p/>
  * Date-time formats that must be supported can be specified with constructor arguments.
  *
+ * <p/>
  * If no Date-time formats are specified, streams will use reflection to find formats.
  */
 public class StreamsJacksonMapper extends ObjectMapper {
 
-    private static Map<StreamsJacksonMapperConfiguration, StreamsJacksonMapper> INSTANCE_MAP = Maps.newConcurrentMap();
-
-    private StreamsJacksonMapperConfiguration configuration = new StreamsJacksonMapperConfiguration();
-
-    public static StreamsJacksonMapper getInstance() {
-        return getInstance(new StreamsJacksonMapperConfiguration());
-    }
-
-    public static StreamsJacksonMapper getInstance(StreamsJacksonMapperConfiguration configuration) {
-        if( INSTANCE_MAP.containsKey(configuration) &&
-                INSTANCE_MAP.get(configuration) != null)
-            return INSTANCE_MAP.get(configuration);
-        else {
-            INSTANCE_MAP.put(configuration, new StreamsJacksonMapper(configuration));
-            return INSTANCE_MAP.get(configuration);
-        }
-    }
-
-    public static StreamsJacksonMapper getInstance(String format){
-
-        StreamsJacksonMapper instance = new StreamsJacksonMapper(Lists.newArrayList(format));
-
-        return instance;
-
+  private static Map<StreamsJacksonMapperConfiguration, StreamsJacksonMapper> INSTANCE_MAP = Maps.newConcurrentMap();
+
+  private StreamsJacksonMapperConfiguration configuration = new StreamsJacksonMapperConfiguration();
+
+  /**
+   * get default StreamsJacksonMapper.
+   * @return StreamsJacksonMapper
+   */
+  public static StreamsJacksonMapper getInstance() {
+    return getInstance(new StreamsJacksonMapperConfiguration());
+  }
+
+  /**
+   * get custom StreamsJacksonMapper.
+   * @param configuration StreamsJacksonMapperConfiguration
+   * @return StreamsJacksonMapper
+   */
+  public static StreamsJacksonMapper getInstance(StreamsJacksonMapperConfiguration configuration) {
+    if ( INSTANCE_MAP.containsKey(configuration)
+         &&
+         INSTANCE_MAP.get(configuration) != null) {
+      return INSTANCE_MAP.get(configuration);
+    } else {
+      INSTANCE_MAP.put(configuration, new StreamsJacksonMapper(configuration));
+      return INSTANCE_MAP.get(configuration);
     }
-    public static StreamsJacksonMapper getInstance(List<String> formats){
-
-        StreamsJacksonMapper instance = new StreamsJacksonMapper(formats);
-
-        return instance;
-
+  }
+
+  /**
+   * get custom StreamsJacksonMapper.
+   * @param format format
+   * @return StreamsJacksonMapper
+   */
+  @Deprecated
+  public static StreamsJacksonMapper getInstance(String format) {
+
+    StreamsJacksonMapper instance = new StreamsJacksonMapper(Lists.newArrayList(format));
+
+    return instance;
+
+  }
+
+  /**
+   * get custom StreamsJacksonMapper.
+   * @param formats formats
+   * @return StreamsJacksonMapper
+   */
+  @Deprecated
+  public static StreamsJacksonMapper getInstance(List<String> formats) {
+
+    StreamsJacksonMapper instance = new StreamsJacksonMapper(formats);
+
+    return instance;
+
+  }
+
+  /*
+    Use getInstance to get a globally shared thread-safe ObjectMapper,
+    rather than call this constructor.  Reflection-based resolution of
+    date-time formats across all modules can be slow and should only happen
+    once per JVM.
+   */
+  protected StreamsJacksonMapper() {
+    super();
+    registerModule(new StreamsJacksonModule(configuration.getDateFormats()));
+    if ( configuration.getEnableScala()) {
+      registerModule(new DefaultScalaModule());
     }
-
-    /*
-      Use getInstance to get a globally shared thread-safe ObjectMapper,
-      rather than call this constructor.  Reflection-based resolution of
-      date-time formats across all modules can be slow and should only happen
-      once per JVM.
-     */
-    protected StreamsJacksonMapper() {
-        super();
-        registerModule(new StreamsJacksonModule(configuration.getDateFormats()));
-        if( configuration.getEnableScala())
-            registerModule(new DefaultScalaModule());
-        configure();
+    configure();
+  }
+
+  @Deprecated
+  public StreamsJacksonMapper(String format) {
+    super();
+    registerModule(new StreamsJacksonModule(Lists.newArrayList(format)));
+    if ( configuration.getEnableScala()) {
+      registerModule(new DefaultScalaModule());
     }
-
-    @Deprecated
-    public StreamsJacksonMapper(String format) {
-        super();
-        registerModule(new StreamsJacksonModule(Lists.newArrayList(format)));
-        if( configuration.getEnableScala())
-            registerModule(new DefaultScalaModule());
-        configure();
+    configure();
+  }
+
+  @Deprecated
+  public StreamsJacksonMapper(List<String> formats) {
+    super();
+    registerModule(new StreamsJacksonModule(formats));
+    if ( configuration.getEnableScala()) {
+      registerModule(new DefaultScalaModule());
     }
-
-    @Deprecated
-    public StreamsJacksonMapper(List<String> formats) {
-        super();
-        registerModule(new StreamsJacksonModule(formats));
-        if( configuration.getEnableScala())
-            registerModule(new DefaultScalaModule());
-        configure();
-    }
-
-    public StreamsJacksonMapper(StreamsJacksonMapperConfiguration configuration) {
-        super();
-        registerModule(new StreamsJacksonModule(configuration.getDateFormats()));
-        if( configuration.getEnableScala())
-            registerModule(new DefaultScalaModule());
-        configure();
-    }
-
-    public void configure() {
-        disable(com.fasterxml.jackson.databind.SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
-        configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.FALSE);
-        configure(DeserializationFeature.FAIL_ON_INVALID_SUBTYPE, Boolean.TRUE);
-        configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
-        configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
-        configure(DeserializationFeature.WRAP_EXCEPTIONS, Boolean.FALSE);
-        configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, Boolean.TRUE);
-        // If a user has an 'object' that does not have an explicit mapping, don't cause the serialization to fail.
-        configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, Boolean.FALSE);
-        configure(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS, Boolean.FALSE);
-        configure(SerializationFeature.WRITE_NULL_MAP_VALUES, Boolean.FALSE);
-        setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.DEFAULT);
-        setSerializationInclusion(JsonInclude.Include.NON_EMPTY);
+    configure();
+  }
+
+  public StreamsJacksonMapper(StreamsJacksonMapperConfiguration configuration) {
+    super();
+    registerModule(new StreamsJacksonModule(configuration.getDateFormats()));
+    if ( configuration.getEnableScala()) {
+      registerModule(new DefaultScalaModule());
     }
+    configure();
+  }
+
+  public void configure() {
+    disable(com.fasterxml.jackson.databind.SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
+    configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.FALSE);
+    configure(DeserializationFeature.FAIL_ON_INVALID_SUBTYPE, Boolean.TRUE);
+    configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
+    configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
+    configure(DeserializationFeature.WRAP_EXCEPTIONS, Boolean.FALSE);
+    configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, Boolean.TRUE);
+    // If a user has an 'object' that does not have an explicit mapping, don't cause the serialization to fail.
+    configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, Boolean.FALSE);
+    configure(SerializationFeature.WRITE_EMPTY_JSON_ARRAYS, Boolean.FALSE);
+    configure(SerializationFeature.WRITE_NULL_MAP_VALUES, Boolean.FALSE);
+    setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.DEFAULT);
+    setSerializationInclusion(JsonInclude.Include.NON_EMPTY);
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsJacksonModule.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsJacksonModule.java b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsJacksonModule.java
index 4e02441..6696885 100644
--- a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsJacksonModule.java
+++ b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsJacksonModule.java
@@ -21,7 +21,6 @@ package org.apache.streams.jackson;
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import org.joda.time.DateTime;
 import org.joda.time.Period;
-
 import org.reflections.Reflections;
 import org.reflections.scanners.SubTypesScanner;
 import org.reflections.util.ClasspathHelper;
@@ -35,46 +34,47 @@ import java.util.Set;
 
 /**
  * StreamsJacksonModule is a supporting class for
- * @see {@link org.apache.streams.jackson.StreamsJacksonMapper}
+ * @see {@link org.apache.streams.jackson.StreamsJacksonMapper}.
  *
+ * <p/>
  * RFC3339 dates are supported by default.
  */
 public class StreamsJacksonModule extends SimpleModule {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsJacksonModule.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsJacksonModule.class);
 
-    public StreamsJacksonModule() {
-        super();
+  public StreamsJacksonModule() {
+    super();
 
-        Reflections reflections = new Reflections(new ConfigurationBuilder()
-                                                  .setUrls(ClasspathHelper.forPackage("org.apache.streams.jackson"))
-                                                  .setScanners(new SubTypesScanner()));
+    Reflections reflections = new Reflections(new ConfigurationBuilder()
+        .setUrls(ClasspathHelper.forPackage("org.apache.streams.jackson"))
+        .setScanners(new SubTypesScanner()));
 
-        Set<Class<? extends StreamsDateTimeFormat>> dateTimeFormatClasses = reflections.getSubTypesOf(StreamsDateTimeFormat.class);
+    Set<Class<? extends StreamsDateTimeFormat>> dateTimeFormatClasses = reflections.getSubTypesOf(StreamsDateTimeFormat.class);
 
-        List<String> dateTimeFormats = new ArrayList<>();
-        for (Class dateTimeFormatClass : dateTimeFormatClasses) {
-            try {
-                dateTimeFormats.add(((StreamsDateTimeFormat) (dateTimeFormatClass.newInstance())).getFormat());
-            } catch (Exception e) {
-                LOGGER.warn("Exception getting format from " + dateTimeFormatClass);
-            }
-        }
+    List<String> dateTimeFormats = new ArrayList<>();
+    for (Class dateTimeFormatClass : dateTimeFormatClasses) {
+      try {
+        dateTimeFormats.add(((StreamsDateTimeFormat) (dateTimeFormatClass.newInstance())).getFormat());
+      } catch (Exception ex) {
+        LOGGER.warn("Exception getting format from " + dateTimeFormatClass);
+      }
+    }
 
-        addSerializer(DateTime.class, new StreamsDateTimeSerializer(DateTime.class));
-        addDeserializer(DateTime.class, new StreamsDateTimeDeserializer(DateTime.class, dateTimeFormats));
+    addSerializer(DateTime.class, new StreamsDateTimeSerializer(DateTime.class));
+    addDeserializer(DateTime.class, new StreamsDateTimeDeserializer(DateTime.class, dateTimeFormats));
 
-        addSerializer(Period.class, new StreamsPeriodSerializer(Period.class));
-        addDeserializer(Period.class, new StreamsPeriodDeserializer(Period.class));
-    }
+    addSerializer(Period.class, new StreamsPeriodSerializer(Period.class));
+    addDeserializer(Period.class, new StreamsPeriodDeserializer(Period.class));
+  }
 
-    public StreamsJacksonModule(List<String> formats) {
-        super();
+  public StreamsJacksonModule(List<String> formats) {
+    super();
 
-        addSerializer(DateTime.class, new StreamsDateTimeSerializer(DateTime.class));
-        addDeserializer(DateTime.class, new StreamsDateTimeDeserializer(DateTime.class, formats));
+    addSerializer(DateTime.class, new StreamsDateTimeSerializer(DateTime.class));
+    addDeserializer(DateTime.class, new StreamsDateTimeDeserializer(DateTime.class, formats));
 
-        addSerializer(Period.class, new StreamsPeriodSerializer(Period.class));
-        addDeserializer(Period.class, new StreamsPeriodDeserializer(Period.class));
-    }
+    addSerializer(Period.class, new StreamsPeriodSerializer(Period.class));
+    addDeserializer(Period.class, new StreamsPeriodDeserializer(Period.class));
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsPeriodDeserializer.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsPeriodDeserializer.java b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsPeriodDeserializer.java
index 56487cd..b205a10 100644
--- a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsPeriodDeserializer.java
+++ b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsPeriodDeserializer.java
@@ -26,14 +26,16 @@ import org.joda.time.Period;
 import java.io.IOException;
 import java.io.Serializable;
 
-public class StreamsPeriodDeserializer extends StdDeserializer<Period> implements Serializable
-{
+/**
+ * StdDeserializer of Period.
+ */
+public class StreamsPeriodDeserializer extends StdDeserializer<Period> implements Serializable {
 
-    protected StreamsPeriodDeserializer(Class<Period> dateTimeClass) {
-        super(dateTimeClass);
-    }
+  protected StreamsPeriodDeserializer(Class<Period> dateTimeClass) {
+    super(dateTimeClass);
+  }
 
-    public Period deserialize(JsonParser jpar, DeserializationContext context) throws IOException {
-        return Period.millis(jpar.getIntValue());
-    }
+  public Period deserialize(JsonParser jpar, DeserializationContext context) throws IOException {
+    return Period.millis(jpar.getIntValue());
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsPeriodSerializer.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsPeriodSerializer.java b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsPeriodSerializer.java
index f72d337..3676615 100644
--- a/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsPeriodSerializer.java
+++ b/streams-pojo/src/main/java/org/apache/streams/jackson/StreamsPeriodSerializer.java
@@ -26,15 +26,17 @@ import org.joda.time.Period;
 import java.io.IOException;
 import java.io.Serializable;
 
-public class StreamsPeriodSerializer extends StdSerializer<Period> implements Serializable
-{
-    protected StreamsPeriodSerializer(Class<Period> dateTimeClass) {
-        super(dateTimeClass);
-    }
+/**
+ * StdSerializer of Period.
+ */
+public class StreamsPeriodSerializer extends StdSerializer<Period> implements Serializable {
+
+  protected StreamsPeriodSerializer(Class<Period> dateTimeClass) {
+    super(dateTimeClass);
+  }
 
-    @Override
-    public void serialize(Period value, JsonGenerator jgen, SerializerProvider provider) throws IOException
-    {
-        jgen.writeString(Integer.toString(value.getMillis()));
-    }
+  @Override
+  public void serialize(Period value, JsonGenerator jgen, SerializerProvider provider) throws IOException {
+    jgen.writeString(Integer.toString(value.getMillis()));
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/test/java/org/apache/streams/pojo/test/ActivitySerDeTest.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/test/java/org/apache/streams/pojo/test/ActivitySerDeTest.java b/streams-pojo/src/test/java/org/apache/streams/pojo/test/ActivitySerDeTest.java
index fe00763..1bd8427 100644
--- a/streams-pojo/src/test/java/org/apache/streams/pojo/test/ActivitySerDeTest.java
+++ b/streams-pojo/src/test/java/org/apache/streams/pojo/test/ActivitySerDeTest.java
@@ -18,11 +18,12 @@
 
 package org.apache.streams.pojo.test;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.pojo.json.Activity;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.commons.io.Charsets;
 import org.apache.commons.io.IOUtils;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
 import org.junit.Test;
 import org.reflections.Reflections;
 import org.reflections.scanners.SubTypesScanner;
@@ -41,64 +42,64 @@ import java.util.Set;
  */
 public class ActivitySerDeTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ActivitySerDeTest.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(ActivitySerDeTest.class);
 
-    private final static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+  private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
 
-    /**
-     * Tests that all example activities can be loaded into Activity beans
-     * @throws Exception
-     */
-    @Test
-    public void testActivitySerDe() throws Exception {
+  /**
+   * Tests that all example activities can be loaded into Activity beans.
+   * @throws Exception Exception
+   */
+  @Test
+  public void testActivitySerDe() throws Exception {
 
-        InputStream testActivityFolderStream = ActivitySerDeTest.class.getClassLoader()
-                .getResourceAsStream("activitystreams-testdocs/activities");
-        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
+    InputStream testActivityFolderStream = ActivitySerDeTest.class.getClassLoader()
+        .getResourceAsStream("activitystreams-testdocs/activities");
+    List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
 
-        for( String file : files) {
-            LOGGER.info("File: " + file );
-            LOGGER.info("Serializing: activities/" + file );
-            InputStream testActivityFileStream = ActivitySerDeTest.class.getClassLoader()
-                    .getResourceAsStream("activities/" + file);
-            Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
-            activity.setGenerator(null);
-            activity.setLinks(new LinkedList<String>());
-            String activityString = MAPPER.writeValueAsString(activity);
-            LOGGER.info("Deserialized: " + activityString );
-            assert( !activityString.contains("null") );
-            assert( !activityString.contains("[]") );
-        }
+    for ( String file : files) {
+      LOGGER.info("File: " + file );
+      LOGGER.info("Serializing: activities/" + file );
+      InputStream testActivityFileStream = ActivitySerDeTest.class.getClassLoader()
+          .getResourceAsStream("activities/" + file);
+      Activity activity = MAPPER.readValue(testActivityFileStream, Activity.class);
+      activity.setGenerator(null);
+      activity.setLinks(new LinkedList<String>());
+      String activityString = MAPPER.writeValueAsString(activity);
+      LOGGER.info("Deserialized: " + activityString );
+      assert ( !activityString.contains("null") );
+      assert ( !activityString.contains("[]") );
     }
+  }
 
-    /**
-     * Tests that defined activity verbs have an example which can be loaded into
-     * Activity beans and into verb-specific beans
-     * @throws Exception
-     */
-    @Test
-    public void testVerbSerDe() throws Exception {
+  /**
+   * Tests that defined activity verbs have an example which can be loaded into
+   * Activity beans and into verb-specific beans.
+   * @throws Exception Exception
+   */
+  @Test
+  public void testVerbSerDe() throws Exception {
 
-        Reflections reflections = new Reflections(new ConfigurationBuilder()
-                .setUrls(ClasspathHelper.forPackage("org.apache.streams.pojo.json"))
-                .setScanners(new SubTypesScanner()));
-        Set<Class<? extends Activity>> verbs = reflections.getSubTypesOf(Activity.class);
+    Reflections reflections = new Reflections(new ConfigurationBuilder()
+        .setUrls(ClasspathHelper.forPackage("org.apache.streams.pojo.json"))
+        .setScanners(new SubTypesScanner()));
+    Set<Class<? extends Activity>> verbs = reflections.getSubTypesOf(Activity.class);
 
-        for( Class verbClass : verbs) {
-            LOGGER.info("Verb: " + verbClass.getSimpleName() );
-            Activity activity = (Activity) verbClass.newInstance();
-            String verbName = activity.getVerb();
-            String testfile = verbName.toLowerCase() + ".json";
-            LOGGER.info("Serializing: activities/" + testfile );
-            assert(ActivitySerDeTest.class.getClassLoader().getResource("activities/" + testfile) != null);
-            InputStream testActivityFileStream = ActivitySerDeTest.class.getClassLoader()
-                    .getResourceAsStream("activities/" + testfile);
-            assert(testActivityFileStream != null);
-            activity = MAPPER.convertValue(MAPPER.readValue(testActivityFileStream, verbClass), Activity.class);
-            String activityString = MAPPER.writeValueAsString(activity);
-            LOGGER.info("Deserialized: " + activityString );
-            assert( !activityString.contains("null") );
-            assert( !activityString.contains("[]") );
-        }
+    for ( Class verbClass : verbs) {
+      LOGGER.info("Verb: " + verbClass.getSimpleName() );
+      Activity activity = (Activity) verbClass.newInstance();
+      String verbName = activity.getVerb();
+      String testfile = verbName.toLowerCase() + ".json";
+      LOGGER.info("Serializing: activities/" + testfile );
+      assert (ActivitySerDeTest.class.getClassLoader().getResource("activities/" + testfile) != null);
+      InputStream testActivityFileStream = ActivitySerDeTest.class.getClassLoader()
+          .getResourceAsStream("activities/" + testfile);
+      assert (testActivityFileStream != null);
+      activity = MAPPER.convertValue(MAPPER.readValue(testActivityFileStream, verbClass), Activity.class);
+      String activityString = MAPPER.writeValueAsString(activity);
+      LOGGER.info("Deserialized: " + activityString );
+      assert ( !activityString.contains("null") );
+      assert ( !activityString.contains("[]") );
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/test/java/org/apache/streams/pojo/test/CustomDateTimeFormatTest.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/test/java/org/apache/streams/pojo/test/CustomDateTimeFormatTest.java b/streams-pojo/src/test/java/org/apache/streams/pojo/test/CustomDateTimeFormatTest.java
index eabde96..8fa927d 100644
--- a/streams-pojo/src/test/java/org/apache/streams/pojo/test/CustomDateTimeFormatTest.java
+++ b/streams-pojo/src/test/java/org/apache/streams/pojo/test/CustomDateTimeFormatTest.java
@@ -18,9 +18,10 @@
 
 package org.apache.streams.pojo.test;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -32,44 +33,44 @@ import static org.junit.Assert.assertEquals;
  */
 public class CustomDateTimeFormatTest {
 
-    @Test
-    public void testCustomDateTimeFormatExplicit() {
-        String format = "EEE MMM dd HH:mm:ss Z yyyy";
-        String input = "Tue Jan 17 21:21:46 Z 2012";
-        Long outputMillis = 1326835306000L;
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance(format);
-        try {
-            String json = "{\"published\":\"" + input + "\"}";
-            Activity activity = mapper.readValue(json, Activity.class);
+  @Test
+  public void testCustomDateTimeFormatExplicit() {
+    String format = "EEE MMM dd HH:mm:ss Z yyyy";
+    String input = "Tue Jan 17 21:21:46 Z 2012";
+    Long outputMillis = 1326835306000L;
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance(format);
+    try {
+      String json = "{\"published\":\"" + input + "\"}";
+      Activity activity = mapper.readValue(json, Activity.class);
 
-            //Writes out value as a String including quotes
-            Long result = activity.getPublished().getMillis();
+      //Writes out value as a String including quotes
+      Long result = activity.getPublished().getMillis();
 
-            assertEquals(result, outputMillis);
-        } catch (Exception e) {
-            e.printStackTrace();
-            Assert.fail();
-        }
+      assertEquals(result, outputMillis);
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      Assert.fail();
     }
+  }
 
-    @Test
-    public void testCustomDateTimeFormatReflection() {
-        String input = "Tue Jan 17 21:21:46 Z 2012";
-        Long outputMillis = 1326835306000L;
-        ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-        try {
-            String json = "{\"published\":\"" + input + "\"}";
-            Activity activity = mapper.readValue(json, Activity.class);
+  @Test
+  public void testCustomDateTimeFormatReflection() {
+    String input = "Tue Jan 17 21:21:46 Z 2012";
+    Long outputMillis = 1326835306000L;
+    ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+    try {
+      String json = "{\"published\":\"" + input + "\"}";
+      Activity activity = mapper.readValue(json, Activity.class);
 
-            //Writes out value as a String including quotes
-            Long result = activity.getPublished().getMillis();
+      //Writes out value as a String including quotes
+      Long result = activity.getPublished().getMillis();
 
-            assertEquals(result, outputMillis);
-        } catch (Exception e) {
-            e.printStackTrace();
-            Assert.fail();
-        }
+      assertEquals(result, outputMillis);
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      Assert.fail();
     }
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-pojo/src/test/java/org/apache/streams/pojo/test/DateTimeSerDeTest.java
----------------------------------------------------------------------
diff --git a/streams-pojo/src/test/java/org/apache/streams/pojo/test/DateTimeSerDeTest.java b/streams-pojo/src/test/java/org/apache/streams/pojo/test/DateTimeSerDeTest.java
index 9fdda61..9a7a5ac 100644
--- a/streams-pojo/src/test/java/org/apache/streams/pojo/test/DateTimeSerDeTest.java
+++ b/streams-pojo/src/test/java/org/apache/streams/pojo/test/DateTimeSerDeTest.java
@@ -18,8 +18,9 @@
 
 package org.apache.streams.pojo.test;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.joda.time.DateTime;
 import org.junit.Assert;
 import org.junit.Ignore;
@@ -35,47 +36,47 @@ import static org.junit.Assert.assertEquals;
  */
 public class DateTimeSerDeTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(DateTimeSerDeTest.class);
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static final Logger LOGGER = LoggerFactory.getLogger(DateTimeSerDeTest.class);
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Ignore
-    @Test
-    // this really needs to be able to pass...
-    public void testActivityStringSer() {
-        String input = "2012-01-17T21:21:46.000Z";
-        try {
-            DateTime output = mapper.readValue(input, DateTime.class);
-        } catch (Exception e) {
-            e.printStackTrace();
-            Assert.fail();
-        }
+  @Ignore
+  @Test
+  // this really needs to be able to pass...
+  public void testActivityStringSer() {
+    String input = "2012-01-17T21:21:46.000Z";
+    try {
+      DateTime output = mapper.readValue(input, DateTime.class);
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      Assert.fail();
     }
+  }
 
-    @Test
-    public void testMillisDeser() {
-        Long input = 1326856906000l;
-        try {
-            DateTime output = mapper.readValue(input.toString(), DateTime.class);
-        } catch (Exception e) {
-            e.printStackTrace();
-            Assert.fail();
-        }
+  @Test
+  public void testMillisDeser() {
+    Long input = 1326856906000L;
+    try {
+      DateTime output = mapper.readValue(input.toString(), DateTime.class);
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      Assert.fail();
     }
+  }
 
-    @Test
-    public void testActivityStringDeser() {
-        String output = "2012-01-17T21:21:46.000Z";
-        long inputMillis = 1326835306000L;
-        DateTime input;
-        try {
-            input = new DateTime(inputMillis);
-            //Writes out value as a String including quotes
-            String result = mapper.writeValueAsString(input);
-            assertEquals(result.replace("\"", ""), output);
-        } catch (Exception e) {
-            e.printStackTrace();
-            Assert.fail();
-        }
+  @Test
+  public void testActivityStringDeser() {
+    String output = "2012-01-17T21:21:46.000Z";
+    long inputMillis = 1326835306000L;
+    DateTime input;
+    try {
+      input = new DateTime(inputMillis);
+      //Writes out value as a String including quotes
+      String result = mapper.writeValueAsString(input);
+      assertEquals(result.replace("\"", ""), output);
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      Assert.fail();
     }
+  }
 
 }


[12/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveResourceGeneratorMojo.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveResourceGeneratorMojo.java b/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveResourceGeneratorMojo.java
index 8638046..eb9490d 100644
--- a/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveResourceGeneratorMojo.java
+++ b/streams-plugins/streams-plugin-hive/src/main/java/org/apache/streams/plugins/hive/StreamsHiveResourceGeneratorMojo.java
@@ -34,49 +34,57 @@ import org.slf4j.LoggerFactory;
 import java.io.File;
 import java.util.List;
 
-@Mojo(  name = "generate-resources",
-        defaultPhase = LifecyclePhase.GENERATE_RESOURCES
-)
-@Execute(   goal = "generate-resources",
-            phase = LifecyclePhase.GENERATE_RESOURCES
-)
+@Mojo (
+    name = "generate-resources",
+    defaultPhase = LifecyclePhase.GENERATE_RESOURCES
+    )
+@Execute (
+    goal = "generate-resources",
+    phase = LifecyclePhase.GENERATE_RESOURCES
+    )
 public class StreamsHiveResourceGeneratorMojo extends AbstractMojo {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsHiveResourceGeneratorMojo.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsHiveResourceGeneratorMojo.class);
 
-    private volatile MojoFailureException mojoFailureException;
+  private volatile MojoFailureException mojoFailureException;
 
-    @Component
-    private MavenProject project;
+  @Component
+  private MavenProject project;
 
-    @Parameter( defaultValue = "${project.basedir}", readonly = true )
-    private File basedir;
+  @Parameter( defaultValue = "${project.basedir}", readonly = true )
+  private File basedir;
 
-    @Parameter( defaultValue = "src/main/jsonschema", readonly = true ) // Maven 3 only
-    public String sourceDirectory;
+  @Parameter( defaultValue = "src/main/jsonschema", readonly = true ) // Maven 3 only
+  public String sourceDirectory;
 
-    @Parameter( readonly = true ) // Maven 3 only
-    public List<String> sourcePaths;
+  @Parameter( readonly = true ) // Maven 3 only
+  public List<String> sourcePaths;
 
-    @Parameter(defaultValue = "./target/generated-resources/hive", readonly = true)
-    public String targetDirectory;
+  @Parameter(defaultValue = "./target/generated-resources/hive", readonly = true)
+  public String targetDirectory;
 
-    public void execute() throws MojoExecutionException, MojoFailureException {
+  /**
+   * execute StreamsHiveResourceGeneratorMojo.
+   * @throws MojoExecutionException MojoExecutionException
+   * @throws MojoFailureException MojoFailureException
+   */
+  public void execute() throws MojoExecutionException, MojoFailureException {
 
-        //addProjectDependenciesToClasspath();
+    //addProjectDependenciesToClasspath();
 
-        StreamsHiveGenerationConfig config = new StreamsHiveGenerationConfig();
+    StreamsHiveGenerationConfig config = new StreamsHiveGenerationConfig();
 
-        if( sourcePaths != null && sourcePaths.size() > 0)
-            config.setSourcePaths(sourcePaths);
-        else
-            config.setSourceDirectory(sourceDirectory);
-        config.setTargetDirectory(targetDirectory);
+    if ( sourcePaths != null && sourcePaths.size() > 0) {
+      config.setSourcePaths(sourcePaths);
+    } else {
+      config.setSourceDirectory(sourceDirectory);
+    }
+    config.setTargetDirectory(targetDirectory);
 
-        StreamsHiveResourceGenerator streamsHiveResourceGenerator = new StreamsHiveResourceGenerator(config);
+    StreamsHiveResourceGenerator streamsHiveResourceGenerator = new StreamsHiveResourceGenerator(config);
 
-        streamsHiveResourceGenerator.run();
+    streamsHiveResourceGenerator.run();
 
-    }
+  }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorCLITest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorCLITest.java b/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorCLITest.java
index 33113aa..827a992 100644
--- a/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorCLITest.java
+++ b/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorCLITest.java
@@ -19,13 +19,12 @@
 
 package org.apache.streams.plugins.test;
 
-import com.google.common.base.Predicate;
+import org.apache.streams.plugins.hive.StreamsHiveResourceGenerator;
+
 import com.google.common.collect.Lists;
 import com.google.common.io.Files;
-import org.apache.streams.plugins.hive.StreamsHiveResourceGenerator;
 import org.junit.Test;
 
-import javax.annotation.Nullable;
 import java.io.File;
 import java.util.Collection;
 import java.util.List;
@@ -33,28 +32,28 @@ import java.util.List;
 import static org.apache.streams.plugins.test.StreamsHiveResourceGeneratorTest.hqlFilter;
 
 /**
- * Created by sblackmon on 5/5/16.
+ * Test whether StreamsHiveResourceGeneratorCLI generates resources.
  */
 public class StreamsHiveResourceGeneratorCLITest {
 
-    @Test
-    public void testStreamsHiveResourceGeneratorCLI() throws Exception {
+  @Test
+  public void testStreamsHiveResourceGeneratorCLI() throws Exception {
 
-        String sourceDirectory = "target/test-classes/activitystreams-schemas";
-        String targetDirectory = "target/generated-resources/hive-cli";
+    String sourceDirectory = "target/test-classes/activitystreams-schemas";
+    String targetDirectory = "target/generated-resources/hive-cli";
 
-        List<String> argsList = Lists.newArrayList(sourceDirectory, targetDirectory);
-        StreamsHiveResourceGenerator.main(argsList.toArray(new String[0]));
+    List<String> argsList = Lists.newArrayList(sourceDirectory, targetDirectory);
+    StreamsHiveResourceGenerator.main(argsList.toArray(new String[0]));
 
-        File testOutput = new File(targetDirectory);
+    File testOutput = new File(targetDirectory);
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(hqlFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 133 );
-    }
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(hqlFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 133 );
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorMojoIT.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorMojoIT.java b/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorMojoIT.java
index e78a175..b5ea4b4 100644
--- a/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorMojoIT.java
+++ b/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorMojoIT.java
@@ -36,48 +36,47 @@ import java.util.List;
 import static org.apache.streams.plugins.test.StreamsHiveResourceGeneratorTest.hqlFilter;
 
 /**
- * Tests that streams-plugin-hive running via maven generates hql resources
+ * Tests that streams-plugin-hive running via maven generates hql resources.
  */
 public class StreamsHiveResourceGeneratorMojoIT extends TestCase {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsHiveResourceGeneratorMojoIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsHiveResourceGeneratorMojoIT.class);
 
-    protected void setUp() throws Exception
-    {
-        // required for mojo lookups to work
-        super.setUp();
-    }
+  protected void setUp() throws Exception {
+    // required for mojo lookups to work
+    super.setUp();
+  }
 
 
-    @Test
-    public void testStreamsHiveResourceGeneratorMojo() throws Exception {
+  @Test
+  public void testStreamsHiveResourceGeneratorMojo() throws Exception {
 
-        File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-hive" );
+    File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-hive" );
 
-        Verifier verifier;
+    Verifier verifier;
 
-        verifier = new Verifier( testDir.getAbsolutePath() );
+    verifier = new Verifier( testDir.getAbsolutePath() );
 
-        List cliOptions = new ArrayList();
-        cliOptions.add( "-N" );
-        verifier.executeGoals( Lists.<String>newArrayList(
-                "clean",
-                "dependency:unpack-dependencies",
-                "generate-resources"));
+    List cliOptions = new ArrayList();
+    cliOptions.add( "-N" );
+    verifier.executeGoals( Lists.<String>newArrayList(
+        "clean",
+        "dependency:unpack-dependencies",
+        "generate-resources"));
 
-        verifier.verifyErrorFreeLog();
+    verifier.verifyErrorFreeLog();
 
-        verifier.resetStreams();
+    verifier.resetStreams();
 
-        File testOutput = new File(testDir.getAbsolutePath() + "/target/generated-resources/hive-mojo");
+    File testOutput = new File(testDir.getAbsolutePath() + "/target/generated-resources/hive-mojo");
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(hqlFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 133 );
-    }
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(hqlFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 133 );
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorTest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorTest.java b/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorTest.java
index c5f5ed7..a5374a0 100644
--- a/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorTest.java
+++ b/streams-plugins/streams-plugin-hive/src/test/java/org/apache/streams/plugins/test/StreamsHiveResourceGeneratorTest.java
@@ -19,25 +19,21 @@
 
 package org.apache.streams.plugins.test;
 
+import org.apache.streams.plugins.hive.StreamsHiveGenerationConfig;
+import org.apache.streams.plugins.hive.StreamsHiveResourceGenerator;
+
 import com.google.common.base.Predicate;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.common.io.Files;
-import org.apache.commons.io.FileUtils;
-import org.apache.streams.plugins.hive.StreamsHiveGenerationConfig;
-import org.apache.streams.plugins.hive.StreamsHiveResourceGenerator;
-import org.junit.Assert;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nullable;
 import java.io.File;
 import java.util.Collection;
-import java.util.Iterator;
-
-import static org.apache.streams.util.schema.FileUtil.dropSourcePathPrefix;
+import javax.annotation.Nullable;
 
 /**
  * Test that Activity beans are compatible with the example activities in the spec.
@@ -45,79 +41,81 @@ import static org.apache.streams.util.schema.FileUtil.dropSourcePathPrefix;
 @Ignore
 public class StreamsHiveResourceGeneratorTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsHiveResourceGeneratorTest.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsHiveResourceGeneratorTest.class);
 
-    public static final Predicate<File> hqlFilter = new Predicate<File>() {
-        @Override
-        public boolean apply(@Nullable File file) {
-            if( file.getName().endsWith(".hql") )
-                return true;
-            else return false;
-        }
-    };
+  public static final Predicate<File> hqlFilter = new Predicate<File>() {
+    @Override
+    public boolean apply(@Nullable File file) {
+      if ( file.getName().endsWith(".hql") ) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+  };
 
-    /**
-     * Tests that all example activities can be loaded into Activity beans
-     *
-     * @throws Exception
-     */
-    @Test
-    public void StreamsHiveResourceGenerator() throws Exception {
+  /**
+   * Tests that all example activities can be loaded into Activity beans.
+   *
+   * @throws Exception Exception
+   */
+  @Test
+  public void testStreamsHiveResourceGenerator() throws Exception {
 
-        StreamsHiveGenerationConfig config = new StreamsHiveGenerationConfig();
+    StreamsHiveGenerationConfig config = new StreamsHiveGenerationConfig();
 
-        String sourceDirectory = "target/test-classes/activitystreams-schemas";
+    String sourceDirectory = "target/test-classes/activitystreams-schemas";
 
-        config.setSourceDirectory(sourceDirectory);
+    config.setSourceDirectory(sourceDirectory);
 
-        config.setTargetDirectory("target/generated-resources/test");
+    config.setTargetDirectory("target/generated-resources/test");
 
-        config.setExclusions(Sets.newHashSet("attachments"));
+    config.setExclusions(Sets.newHashSet("attachments"));
 
-        config.setMaxDepth(2);
+    config.setMaxDepth(2);
 
-        StreamsHiveResourceGenerator streamsHiveResourceGenerator = new StreamsHiveResourceGenerator(config);
-        streamsHiveResourceGenerator.run();
+    StreamsHiveResourceGenerator streamsHiveResourceGenerator = new StreamsHiveResourceGenerator(config);
+    streamsHiveResourceGenerator.run();
 
-        File testOutput = config.getTargetDirectory();
+    File testOutput = config.getTargetDirectory();
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(hqlFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 133 );
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(hqlFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 133 );
 
-        /* TODO: figure out how to compare without AL header interfering
-        String expectedDirectory = "target/test-classes/expected";
-        File testExpected = new File( expectedDirectory );
+    /* TODO: figure out how to compare without AL header interfering
+    String expectedDirectory = "target/test-classes/expected";
+    File testExpected = new File( expectedDirectory );
 
-        Iterable<File> expectedIterator = Files.fileTreeTraverser().breadthFirstTraversal(testExpected)
-                .filter(hqlFilter);
-        Collection<File> expectedCollection = Lists.newArrayList(expectedIterator);
+    Iterable<File> expectedIterator = Files.fileTreeTraverser().breadthFirstTraversal(testExpected)
+            .filter(hqlFilter);
+    Collection<File> expectedCollection = Lists.newArrayList(expectedIterator);
 
-        int fails = 0;
+    int fails = 0;
 
-        Iterator<File> iterator = expectedCollection.iterator();
-        while( iterator.hasNext() ) {
-            File objectExpected = iterator.next();
-            String expectedEnd = dropSourcePathPrefix(objectExpected.getAbsolutePath(),  expectedDirectory);
-            File objectActual = new File(config.getTargetDirectory() + "/" + expectedEnd);
-            LOGGER.info("Comparing: {} and {}", objectExpected.getAbsolutePath(), objectActual.getAbsolutePath());
-            assert( objectActual.exists());
-            if( FileUtils.contentEquals(objectActual, objectExpected) == true ) {
-                LOGGER.info("Exact Match!");
-            } else {
-                LOGGER.info("No Match!");
-                fails++;
-            }
-        }
-        if( fails > 0 ) {
-            LOGGER.info("Fails: {}", fails);
-            Assert.fail();
+    Iterator<File> iterator = expectedCollection.iterator();
+    while( iterator.hasNext() ) {
+        File objectExpected = iterator.next();
+        String expectedEnd = dropSourcePathPrefix(objectExpected.getAbsolutePath(),  expectedDirectory);
+        File objectActual = new File(config.getTargetDirectory() + "/" + expectedEnd);
+        LOGGER.info("Comparing: {} and {}", objectExpected.getAbsolutePath(), objectActual.getAbsolutePath());
+        assert( objectActual.exists());
+        if( FileUtils.contentEquals(objectActual, objectExpected) == true ) {
+            LOGGER.info("Exact Match!");
+        } else {
+            LOGGER.info("No Match!");
+            fails++;
         }
-        */
-   }
+    }
+    if( fails > 0 ) {
+        LOGGER.info("Fails: {}", fails);
+        Assert.fail();
+    }
+    */
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigGenerationConfig.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigGenerationConfig.java b/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigGenerationConfig.java
index 2af8eeb..c4358a0 100644
--- a/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigGenerationConfig.java
+++ b/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigGenerationConfig.java
@@ -20,6 +20,7 @@
 package org.apache.streams.plugins.pig;
 
 import org.apache.streams.util.schema.GenerationConfig;
+
 import org.jsonschema2pojo.DefaultGenerationConfig;
 import org.jsonschema2pojo.util.URLUtil;
 
@@ -33,70 +34,73 @@ import java.util.List;
 import java.util.Set;
 
 /**
- * Configures StreamsHiveResourceGenerator
- *
- *
+ * Configures StreamsPigResourceGenerator.
  */
 public class StreamsPigGenerationConfig extends DefaultGenerationConfig implements GenerationConfig {
 
-    public String getSourceDirectory() {
-        return sourceDirectory;
-    }
-
-    public List<String> getSourcePaths() {
-        return sourcePaths;
-    }
-
-    private String sourceDirectory;
-    private List<String> sourcePaths = new ArrayList<String>();
-    private String targetDirectory;
-    private int maxDepth = 1;
-
-    public Set<String> getExclusions() {
-        return exclusions;
-    }
-
-    public void setExclusions(Set<String> exclusions) {
-        this.exclusions = exclusions;
+  public String getSourceDirectory() {
+    return sourceDirectory;
+  }
+
+  public List<String> getSourcePaths() {
+    return sourcePaths;
+  }
+
+  private String sourceDirectory;
+  private List<String> sourcePaths = new ArrayList<String>();
+  private String targetDirectory;
+  private int maxDepth = 1;
+
+  public Set<String> getExclusions() {
+    return exclusions;
+  }
+
+  public void setExclusions(Set<String> exclusions) {
+    this.exclusions = exclusions;
+  }
+
+  private Set<String> exclusions = new HashSet<String>();
+
+  public int getMaxDepth() {
+    return maxDepth;
+  }
+
+  public void setSourceDirectory(String sourceDirectory) {
+    this.sourceDirectory = sourceDirectory;
+  }
+
+  public void setSourcePaths(List<String> sourcePaths) {
+    this.sourcePaths = sourcePaths;
+  }
+
+  public void setTargetDirectory(String targetDirectory) {
+    this.targetDirectory = targetDirectory;
+  }
+
+  @Override
+  public File getTargetDirectory() {
+    return new File(targetDirectory);
+  }
+
+  /**
+   * Get all sources.
+   * @return Iterator of URL
+   */
+  @Override
+  public Iterator<URL> getSource() {
+    if (null != sourceDirectory) {
+      return Collections.singleton(URLUtil.parseURL(sourceDirectory)).iterator();
     }
-
-    private Set<String> exclusions = new HashSet<String>();
-
-    public int getMaxDepth() {
-        return maxDepth;
+    List<URL> sourceUrls = new ArrayList<URL>();
+    if ( sourcePaths != null && sourcePaths.size() > 0) {
+      for (String source : sourcePaths) {
+        sourceUrls.add(URLUtil.parseURL(source));
+      }
     }
+    return sourceUrls.iterator();
+  }
 
-    public void setSourceDirectory(String sourceDirectory) {
-        this.sourceDirectory = sourceDirectory;
-    }
-
-    public void setSourcePaths(List<String> sourcePaths) {
-        this.sourcePaths = sourcePaths;
-    }
-
-    public void setTargetDirectory(String targetDirectory) {
-        this.targetDirectory = targetDirectory;
-    }
-
-    @Override
-    public File getTargetDirectory() {
-        return new File(targetDirectory);
-    }
-
-    @Override
-    public Iterator<URL> getSource() {
-        if (null != sourceDirectory) {
-            return Collections.singleton(URLUtil.parseURL(sourceDirectory)).iterator();
-        }
-        List<URL> sourceURLs = new ArrayList<URL>();
-        if( sourcePaths != null && sourcePaths.size() > 0)
-            for (String source : sourcePaths) {
-                sourceURLs.add(URLUtil.parseURL(source));
-            }
-        return sourceURLs.iterator();
-    }
-
-    public void setMaxDepth(int maxDepth) {
-        this.maxDepth = maxDepth;
-    }
+  public void setMaxDepth(int maxDepth) {
+    this.maxDepth = maxDepth;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigResourceGenerator.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigResourceGenerator.java b/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigResourceGenerator.java
index 465cdb2..039f45b 100644
--- a/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigResourceGenerator.java
+++ b/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigResourceGenerator.java
@@ -19,12 +19,6 @@
 
 package org.apache.streams.plugins.pig;
 
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
 import org.apache.streams.util.schema.FieldType;
 import org.apache.streams.util.schema.FieldUtil;
 import org.apache.streams.util.schema.FileUtil;
@@ -32,6 +26,13 @@ import org.apache.streams.util.schema.GenerationConfig;
 import org.apache.streams.util.schema.Schema;
 import org.apache.streams.util.schema.SchemaStore;
 import org.apache.streams.util.schema.SchemaStoreImpl;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Joiner;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
 import org.jsonschema2pojo.util.URLUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -49,276 +50,312 @@ import static org.apache.streams.util.schema.FileUtil.dropSourcePathPrefix;
 import static org.apache.streams.util.schema.FileUtil.swapExtension;
 import static org.apache.streams.util.schema.FileUtil.writeFile;
 
+/**
+ * Embed within your own Java code.
+ *
+ * <p>
+ * StreamsPigGenerationConfig config = new StreamsPigGenerationConfig();
+ * config.setSourceDirectory("src/main/jsonschema");
+ * config.setTargetDirectory("target/generated-resources");
+ * StreamsPigResourceGenerator generator = new StreamsPigResourceGenerator(config);
+ * generator.run();
+ *
+ */
 public class StreamsPigResourceGenerator implements Runnable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsPigResourceGenerator.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsPigResourceGenerator.class);
 
-    private final static String LS = System.getProperty("line.separator");
+  private static final String LS = System.getProperty("line.separator");
 
-    private StreamsPigGenerationConfig config;
+  private StreamsPigGenerationConfig config;
 
-    private SchemaStore schemaStore = new SchemaStoreImpl();
+  private SchemaStore schemaStore = new SchemaStoreImpl();
 
-    private int currentDepth = 0;
+  private int currentDepth = 0;
 
-    public static void main(String[] args) {
-        StreamsPigGenerationConfig config = new StreamsPigGenerationConfig();
+  /**
+   * Run from CLI without Maven.
+   *
+   * <p>
+   * java -jar streams-plugin-pig-jar-with-dependencies.jar StreamsPigResourceGenerator src/main/jsonschema target/generated-resources
+   *
+   * @param args [sourceDirectory, targetDirectory]
+   */
+  public static void main(String[] args) {
+    StreamsPigGenerationConfig config = new StreamsPigGenerationConfig();
 
-        String sourceDirectory = "src/main/jsonschema";
-        String targetDirectory = "target/generated-resources/pig-cli";
+    String sourceDirectory = "src/main/jsonschema";
+    String targetDirectory = "target/generated-resources/pig-cli";
 
-        if (args.length > 0)
-            sourceDirectory = args[0];
-        if (args.length > 1)
-            targetDirectory = args[1];
-
-        config.setSourceDirectory(sourceDirectory);
-        config.setTargetDirectory(targetDirectory);
+    if ( args.length > 0 ) {
+      sourceDirectory = args[0];
+    }
+    if ( args.length > 1 ) {
+      targetDirectory = args[1];
+    }
 
-        StreamsPigResourceGenerator streamsPigResourceGenerator = new StreamsPigResourceGenerator(config);
-        streamsPigResourceGenerator.run();
+    config.setSourceDirectory(sourceDirectory);
+    config.setTargetDirectory(targetDirectory);
 
-    }
+    StreamsPigResourceGenerator streamsPigResourceGenerator = new StreamsPigResourceGenerator(config);
+    streamsPigResourceGenerator.run();
 
-    public StreamsPigResourceGenerator(StreamsPigGenerationConfig config) {
-        this.config = config;
-    }
+  }
 
-    public void run() {
+  public StreamsPigResourceGenerator(StreamsPigGenerationConfig config) {
+    this.config = config;
+  }
 
-        checkNotNull(config);
+  @Override
+  public void run() {
 
-        generate(config);
+    checkNotNull(config);
 
-    }
+    generate(config);
 
-    public void generate(StreamsPigGenerationConfig config) {
+  }
 
-        LinkedList<File> sourceFiles = new LinkedList<File>();
+  /**
+   * Run generation using the supplied StreamsPigGenerationConfig.
+   * @param config StreamsPigGenerationConfig
+   */
+  public void generate(StreamsPigGenerationConfig config) {
 
-        for (Iterator<URL> sources = config.getSource(); sources.hasNext(); ) {
-            URL source = sources.next();
-            sourceFiles.add(URLUtil.getFileFromURL(source));
-        }
+    LinkedList<File> sourceFiles = new LinkedList<File>();
 
-        LOGGER.info("Seeded with {} source paths:", sourceFiles.size());
+    for (Iterator<URL> sources = config.getSource(); sources.hasNext(); ) {
+      URL source = sources.next();
+      sourceFiles.add(URLUtil.getFileFromURL(source));
+    }
 
-        FileUtil.resolveRecursive((GenerationConfig) config, sourceFiles);
+    LOGGER.info("Seeded with {} source paths:", sourceFiles.size());
 
-        LOGGER.info("Resolved {} schema files:", sourceFiles.size());
+    FileUtil.resolveRecursive((GenerationConfig) config, sourceFiles);
 
-        for (Iterator<File> iterator = sourceFiles.iterator(); iterator.hasNext(); ) {
-            File item = iterator.next();
-            schemaStore.create(item.toURI());
-        }
+    LOGGER.info("Resolved {} schema files:", sourceFiles.size());
 
-        LOGGER.info("Identified {} objects:", schemaStore.getSize());
+    for (Iterator<File> iterator = sourceFiles.iterator(); iterator.hasNext(); ) {
+      File item = iterator.next();
+      schemaStore.create(item.toURI());
+    }
 
-        for (Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator(); schemaIterator.hasNext(); ) {
-            Schema schema = schemaIterator.next();
-            currentDepth = 0;
-            if (schema.getURI().getScheme().equals("file")) {
-                String inputFile = schema.getURI().getPath();
-                String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
-                for (String sourcePath : config.getSourcePaths()) {
-                    resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
-                }
-                String outputFile = config.getTargetDirectory() + "/" + swapExtension(resourcePath, "json", "pig");
+    LOGGER.info("Identified {} objects:", schemaStore.getSize());
 
-                LOGGER.info("Processing {}:", resourcePath);
+    for (Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator(); schemaIterator.hasNext(); ) {
+      Schema schema = schemaIterator.next();
+      currentDepth = 0;
+      if (schema.getUri().getScheme().equals("file")) {
+        String inputFile = schema.getUri().getPath();
+        String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
+        for (String sourcePath : config.getSourcePaths()) {
+          resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
+        }
+        String outputFile = config.getTargetDirectory() + "/" + swapExtension(resourcePath, "json", "pig");
 
-                String resourceId = schemaSymbol(schema);
+        LOGGER.info("Processing {}:", resourcePath);
 
-                String resourceContent = generateResource(schema, resourceId);
+        String resourceId = schemaSymbol(schema);
 
-                writeFile(outputFile, resourceContent);
+        String resourceContent = generateResource(schema, resourceId);
 
-                LOGGER.info("Wrote {}:", outputFile);
-            }
-        }
-    }
+        writeFile(outputFile, resourceContent);
 
-    public String generateResource(Schema schema, String resourceId) {
-        StringBuilder resourceBuilder = new StringBuilder();
-        resourceBuilder.append(pigEscape(resourceId));
-        resourceBuilder.append(" = ");
-        resourceBuilder.append("LOAD '' USING JsonLoader('");
-        resourceBuilder = appendRootObject(resourceBuilder, schema, resourceId, ':');
-        resourceBuilder.append("');");
-        return resourceBuilder.toString();
+        LOGGER.info("Wrote {}:", outputFile);
+      }
     }
-
-    public StringBuilder appendRootObject(StringBuilder builder, Schema schema, String resourceId, Character seperator) {
-        ObjectNode propertiesNode = schemaStore.resolveProperties(schema, null, resourceId);
-        if (propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0) {
-            builder = appendPropertiesNode(builder, schema, propertiesNode, seperator);
-        }
-        return builder;
+  }
+
+  /**
+   * generateResource String from schema and resourceId.
+   * @param schema Schema
+   * @param resourceId String
+   * @return mapping
+   */
+  public String generateResource(Schema schema, String resourceId) {
+    StringBuilder resourceBuilder = new StringBuilder();
+    resourceBuilder.append(pigEscape(resourceId));
+    resourceBuilder.append(" = ");
+    resourceBuilder.append("LOAD '' USING JsonLoader('");
+    resourceBuilder = appendRootObject(resourceBuilder, schema, resourceId, ':');
+    resourceBuilder.append("');");
+    return resourceBuilder.toString();
+  }
+
+  protected StringBuilder appendRootObject(StringBuilder builder, Schema schema, String resourceId, Character seperator) {
+    ObjectNode propertiesNode = schemaStore.resolveProperties(schema, null, resourceId);
+    if (propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0) {
+      builder = appendPropertiesNode(builder, schema, propertiesNode, seperator);
     }
-
-    private StringBuilder appendPropertiesNode(StringBuilder builder, Schema schema, ObjectNode propertiesNode, Character seperator) {
-        checkNotNull(builder);
-        checkNotNull(propertiesNode);
-        Iterator<Map.Entry<String, JsonNode>> fields = propertiesNode.fields();
-        Joiner joiner = Joiner.on(", ").skipNulls();
-        List<String> fieldStrings = Lists.newArrayList();
-        for( ; fields.hasNext(); ) {
-            Map.Entry<String, JsonNode> field = fields.next();
-            String fieldId = field.getKey();
-            if( !config.getExclusions().contains(fieldId) && field.getValue().isObject()) {
-                ObjectNode fieldNode = (ObjectNode) field.getValue();
-                FieldType fieldType = FieldUtil.determineFieldType(fieldNode);
-                if (fieldType != null ) {
-                    switch (fieldType) {
-                        case ARRAY:
-                            ObjectNode resolvedItems = schemaStore.resolveItems(schema, fieldNode, fieldId);
-                            if( resolvedItems != null && currentDepth <= config.getMaxDepth()) {
-                                StringBuilder arrayItemsBuilder = appendArrayItems(new StringBuilder(), schema, fieldId, resolvedItems, seperator);
-                                if( !Strings.isNullOrEmpty(arrayItemsBuilder.toString())) {
-                                    fieldStrings.add(arrayItemsBuilder.toString());
-                                }
-                            }
-                            break;
-                        case OBJECT:
-                            ObjectNode childProperties = schemaStore.resolveProperties(schema, fieldNode, fieldId);
-                            if( currentDepth < config.getMaxDepth()) {
-                                StringBuilder structFieldBuilder = appendStructField(new StringBuilder(), schema, fieldId, childProperties, seperator);
-                                if( !Strings.isNullOrEmpty(structFieldBuilder.toString())) {
-                                    fieldStrings.add(structFieldBuilder.toString());
-                                }
-                            }
-                            break;
-                        default:
-                            StringBuilder valueFieldBuilder = appendValueField(new StringBuilder(), schema, fieldId, fieldType, seperator);
-                            if( !Strings.isNullOrEmpty(valueFieldBuilder.toString())) {
-                                fieldStrings.add(valueFieldBuilder.toString());
-                            }
-                    }
+    return builder;
+  }
+
+  private StringBuilder appendPropertiesNode(StringBuilder builder, Schema schema, ObjectNode propertiesNode, Character seperator) {
+    checkNotNull(builder);
+    checkNotNull(propertiesNode);
+    Iterator<Map.Entry<String, JsonNode>> fields = propertiesNode.fields();
+    Joiner joiner = Joiner.on(", ").skipNulls();
+    List<String> fieldStrings = Lists.newArrayList();
+    for ( ; fields.hasNext(); ) {
+      Map.Entry<String, JsonNode> field = fields.next();
+      String fieldId = field.getKey();
+      if ( !config.getExclusions().contains(fieldId) && field.getValue().isObject()) {
+        ObjectNode fieldNode = (ObjectNode) field.getValue();
+        FieldType fieldType = FieldUtil.determineFieldType(fieldNode);
+        if (fieldType != null ) {
+          switch (fieldType) {
+            case ARRAY:
+              ObjectNode resolvedItems = schemaStore.resolveItems(schema, fieldNode, fieldId);
+              if ( resolvedItems != null && currentDepth <= config.getMaxDepth()) {
+                StringBuilder arrayItemsBuilder = appendArrayItems(new StringBuilder(), schema, fieldId, resolvedItems, seperator);
+                if ( !Strings.isNullOrEmpty(arrayItemsBuilder.toString())) {
+                  fieldStrings.add(arrayItemsBuilder.toString());
+                }
+              }
+              break;
+            case OBJECT:
+              ObjectNode childProperties = schemaStore.resolveProperties(schema, fieldNode, fieldId);
+              if ( currentDepth < config.getMaxDepth()) {
+                StringBuilder structFieldBuilder = appendStructField(new StringBuilder(), schema, fieldId, childProperties, seperator);
+                if ( !Strings.isNullOrEmpty(structFieldBuilder.toString())) {
+                  fieldStrings.add(structFieldBuilder.toString());
                 }
-            }
+              }
+              break;
+            default:
+              StringBuilder valueFieldBuilder = appendValueField(new StringBuilder(), schema, fieldId, fieldType, seperator);
+              if ( !Strings.isNullOrEmpty(valueFieldBuilder.toString())) {
+                fieldStrings.add(valueFieldBuilder.toString());
+              }
+          }
         }
-        joiner.appendTo(builder, fieldStrings);
-        Preconditions.checkNotNull(builder);
-        return builder;
+      }
     }
-
-    private StringBuilder appendValueField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
-        // safe to append nothing
-        checkNotNull(builder);
-        builder.append(pigEscape(fieldId));
-        builder.append(seperator);
-        builder.append(pigType(fieldType));
-        return builder;
-    }
-
-    public StringBuilder appendArrayItems(StringBuilder builder, Schema schema, String fieldId, ObjectNode itemsNode, Character seperator) {
-        // not safe to append nothing
-        checkNotNull(builder);
-        if( itemsNode == null ) return builder;
-        FieldType itemType = FieldUtil.determineFieldType(itemsNode);
-        try {
-            switch( itemType ) {
-                case OBJECT:
-                    builder = appendArrayObject(builder, schema, fieldId, itemsNode, seperator);
-                    break;
-                case ARRAY:
-                    ObjectNode subArrayItems = (ObjectNode) itemsNode.get("items");
-                    builder = appendArrayItems(builder, schema, fieldId, subArrayItems, seperator);
-                    break;
-                default:
-                    builder = appendArrayField(builder, schema, fieldId, itemType, seperator);
-            }
-        } catch (Exception e) {
-            LOGGER.warn("No item type resolvable for {}", fieldId);
-        }
-
-        checkNotNull(builder);
-        return builder;
+    joiner.appendTo(builder, fieldStrings);
+    Preconditions.checkNotNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendValueField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
+    // safe to append nothing
+    checkNotNull(builder);
+    builder.append(pigEscape(fieldId));
+    builder.append(seperator);
+    builder.append(pigType(fieldType));
+    return builder;
+  }
+
+  protected StringBuilder appendArrayItems(StringBuilder builder, Schema schema, String fieldId, ObjectNode itemsNode, Character seperator) {
+    // not safe to append nothing
+    checkNotNull(builder);
+    if ( itemsNode == null ) {
+      return builder;
     }
-
-    private StringBuilder appendArrayField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
-        // safe to append nothing
-        checkNotNull(builder);
-        checkNotNull(fieldId);
-        builder.append("{t: (");
-        builder.append(pigEscape(fieldId));
-        builder.append(seperator);
-        builder.append(pigType(fieldType));
-        builder.append(")}");
-        checkNotNull(builder);
-        return builder;
+    FieldType itemType = FieldUtil.determineFieldType(itemsNode);
+    try {
+      switch ( itemType ) {
+        case OBJECT:
+          builder = appendArrayObject(builder, schema, fieldId, itemsNode, seperator);
+          break;
+        case ARRAY:
+          ObjectNode subArrayItems = (ObjectNode) itemsNode.get("items");
+          builder = appendArrayItems(builder, schema, fieldId, subArrayItems, seperator);
+          break;
+        default:
+          builder = appendArrayField(builder, schema, fieldId, itemType, seperator);
+      }
+    } catch (Exception ex) {
+      LOGGER.warn("No item type resolvable for {}", fieldId);
     }
 
-    private StringBuilder appendArrayObject(StringBuilder builder, Schema schema, String fieldId, ObjectNode fieldNode, Character seperator) {
-        // safe to append nothing
-        checkNotNull(builder);
-        checkNotNull(fieldId);
-        checkNotNull(fieldNode);
-        ObjectNode propertiesNode = schemaStore.resolveProperties(schema, fieldNode, fieldId);
-        if( propertiesNode.size() > 0 ) {
-            builder.append("{t: (");
-            builder = appendStructField(builder, schema, "", propertiesNode, ':');
-            builder.append(")}");
-        }
-        checkNotNull(builder);
-        return builder;
+    checkNotNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendArrayField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
+    // safe to append nothing
+    checkNotNull(builder);
+    checkNotNull(fieldId);
+    builder.append("{t: (");
+    builder.append(pigEscape(fieldId));
+    builder.append(seperator);
+    builder.append(pigType(fieldType));
+    builder.append(")}");
+    checkNotNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendArrayObject(StringBuilder builder, Schema schema, String fieldId, ObjectNode fieldNode, Character seperator) {
+    // safe to append nothing
+    checkNotNull(builder);
+    checkNotNull(fieldId);
+    checkNotNull(fieldNode);
+    ObjectNode propertiesNode = schemaStore.resolveProperties(schema, fieldNode, fieldId);
+    if ( propertiesNode.size() > 0 ) {
+      builder.append("{t: (");
+      builder = appendStructField(builder, schema, "", propertiesNode, ':');
+      builder.append(")}");
     }
+    checkNotNull(builder);
+    return builder;
+  }
 
-    private StringBuilder appendStructField(StringBuilder builder, Schema schema, String fieldId, ObjectNode propertiesNode, Character seperator) {
-        // safe to append nothing
-        checkNotNull(builder);
-        checkNotNull(propertiesNode);
+  private StringBuilder appendStructField(StringBuilder builder, Schema schema, String fieldId, ObjectNode propertiesNode, Character seperator) {
+    // safe to append nothing
+    checkNotNull(builder);
+    checkNotNull(propertiesNode);
 
-        if( propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0 ) {
+    if ( propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0 ) {
 
-            currentDepth += 1;
+      currentDepth += 1;
 
-            if( !Strings.isNullOrEmpty(fieldId)) {
-                builder.append(pigEscape(fieldId));
-                builder.append(seperator);
-                builder.append("(");
-                builder = appendPropertiesNode(builder, schema, propertiesNode, ':');
-                builder.append(")");
-            }
+      if ( !Strings.isNullOrEmpty(fieldId)) {
+        builder.append(pigEscape(fieldId));
+        builder.append(seperator);
+        builder.append("(");
+        builder = appendPropertiesNode(builder, schema, propertiesNode, ':');
+        builder.append(")");
+      }
 
-            currentDepth -= 1;
+      currentDepth -= 1;
 
-        }
-        checkNotNull(builder);
-        return builder;
     }
-
-    private static String pigEscape( String fieldId ) {
-        return fieldId;
+    checkNotNull(builder);
+    return builder;
+  }
+
+  private static String pigEscape( String fieldId ) {
+    return fieldId;
+  }
+
+  private static String pigType( FieldType fieldType ) {
+    switch ( fieldType ) {
+      case STRING:
+        return "chararray";
+      case INTEGER:
+        return "int";
+      case NUMBER:
+        return "double";
+      case OBJECT:
+        return "tuple";
+      default:
+        return fieldType.name().toLowerCase();
     }
+  }
 
-    private static String pigType( FieldType fieldType ) {
-        switch( fieldType ) {
-            case STRING:
-                return "chararray";
-            case INTEGER:
-                return "int";
-            case NUMBER:
-                return "double";
-            case OBJECT:
-                return "tuple";
-            default:
-                return fieldType.name().toLowerCase();
-        }
+  private String schemaSymbol( Schema schema ) {
+    if (schema == null) {
+      return null;
     }
-
-    private String schemaSymbol( Schema schema ) {
-        if (schema == null) return null;
-        // this needs to return whatever
-        if (schema.getURI().getScheme().equals("file")) {
-            String inputFile = schema.getURI().getPath();
-            String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
-            for (String sourcePath : config.getSourcePaths()) {
-                resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
-            }
-            return dropExtension(resourcePath).replace("/", "_").replace("-", "");
-        } else {
-            return "IDK";
-        }
+    // this needs to return whatever
+    if (schema.getUri().getScheme().equals("file")) {
+      String inputFile = schema.getUri().getPath();
+      String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
+      for (String sourcePath : config.getSourcePaths()) {
+        resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
+      }
+      return dropExtension(resourcePath).replace("/", "_").replace("-", "");
+    } else {
+      return "IDK";
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigResourceGeneratorMojo.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigResourceGeneratorMojo.java b/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigResourceGeneratorMojo.java
index 1d2673f..b1087c4 100644
--- a/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigResourceGeneratorMojo.java
+++ b/streams-plugins/streams-plugin-pig/src/main/java/org/apache/streams/plugins/pig/StreamsPigResourceGeneratorMojo.java
@@ -34,47 +34,55 @@ import org.slf4j.LoggerFactory;
 import java.io.File;
 import java.util.List;
 
-@Mojo(  name = "generate-resources",
-        defaultPhase = LifecyclePhase.GENERATE_RESOURCES
-)
-@Execute(   goal = "generate-resources",
-            phase = LifecyclePhase.GENERATE_RESOURCES
-)
+@Mojo (
+    name = "generate-resources",
+    defaultPhase = LifecyclePhase.GENERATE_RESOURCES
+    )
+@Execute (
+    goal = "generate-resources",
+    phase = LifecyclePhase.GENERATE_RESOURCES
+    )
 public class StreamsPigResourceGeneratorMojo extends AbstractMojo {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsPigResourceGeneratorMojo.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsPigResourceGeneratorMojo.class);
 
-    private volatile MojoFailureException mojoFailureException;
+  private volatile MojoFailureException mojoFailureException;
 
-    @Component
-    private MavenProject project;
+  @Component
+  private MavenProject project;
 
-    @Parameter( defaultValue = "${project.basedir}", readonly = true )
-    private File basedir;
+  @Parameter( defaultValue = "${project.basedir}", readonly = true )
+  private File basedir;
 
-    @Parameter( defaultValue = "src/main/jsonschema", readonly = true ) // Maven 3 only
-    public String sourceDirectory;
+  @Parameter( defaultValue = "src/main/jsonschema", readonly = true ) // Maven 3 only
+  public String sourceDirectory;
 
-    @Parameter( readonly = true ) // Maven 3 only
-    public List<String> sourcePaths;
+  @Parameter( readonly = true ) // Maven 3 only
+  public List<String> sourcePaths;
 
-    @Parameter(defaultValue = "target/generated-resources/pig", readonly = true)
-    public String targetDirectory;
+  @Parameter(defaultValue = "target/generated-resources/pig", readonly = true)
+  public String targetDirectory;
 
-    public void execute() throws MojoExecutionException, MojoFailureException {
+  /**
+   * execute StreamsPigResourceGeneratorMojo.
+   * @throws MojoExecutionException MojoExecutionException
+   * @throws MojoFailureException MojoFailureException
+   */
+  public void execute() throws MojoExecutionException, MojoFailureException {
 
-        //addProjectDependenciesToClasspath();
+    //addProjectDependenciesToClasspath();
 
-        StreamsPigGenerationConfig config = new StreamsPigGenerationConfig();
+    StreamsPigGenerationConfig config = new StreamsPigGenerationConfig();
 
-        if( sourcePaths != null && sourcePaths.size() > 0)
-            config.setSourcePaths(sourcePaths);
-        else
-            config.setSourceDirectory(sourceDirectory);
-        config.setTargetDirectory(targetDirectory);
-
-        StreamsPigResourceGenerator streamsPigResourceGenerator = new StreamsPigResourceGenerator(config);
-        streamsPigResourceGenerator.run();
+    if ( sourcePaths != null && sourcePaths.size() > 0) {
+      config.setSourcePaths(sourcePaths);
+    } else {
+      config.setSourceDirectory(sourceDirectory);
     }
+    config.setTargetDirectory(targetDirectory);
+
+    StreamsPigResourceGenerator streamsPigResourceGenerator = new StreamsPigResourceGenerator(config);
+    streamsPigResourceGenerator.run();
+  }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pig/src/site/markdown/index.md
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pig/src/site/markdown/index.md b/streams-plugins/streams-plugin-pig/src/site/markdown/index.md
index 7157023..9cf39ef 100644
--- a/streams-plugins/streams-plugin-pig/src/site/markdown/index.md
+++ b/streams-plugins/streams-plugin-pig/src/site/markdown/index.md
@@ -22,7 +22,7 @@ Embed within your own java code
     StreamsPigGenerationConfig config = new StreamsPigGenerationConfig();
     config.setSourceDirectory("src/main/jsonschema");
     config.setTargetDirectory("target/generated-resources");
-    StreamsPigGenerationConfig generator = new StreamsPigGenerationConfig(config);
+    StreamsPigResourceGenerator generator = new StreamsPigResourceGenerator(config);
     generator.run();
   
 ##### CLI

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorCLITest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorCLITest.java b/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorCLITest.java
index 55daaf2..6802667 100644
--- a/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorCLITest.java
+++ b/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorCLITest.java
@@ -19,9 +19,10 @@
 
 package org.apache.streams.plugins.pig.test;
 
+import org.apache.streams.plugins.pig.StreamsPigResourceGenerator;
+
 import com.google.common.collect.Lists;
 import com.google.common.io.Files;
-import org.apache.streams.plugins.pig.StreamsPigResourceGenerator;
 import org.junit.Test;
 
 import java.io.File;
@@ -31,28 +32,28 @@ import java.util.List;
 import static org.apache.streams.plugins.pig.test.StreamsPigResourceGeneratorTest.pigFilter;
 
 /**
- * Created by sblackmon on 5/5/16.
+ * Test whether StreamsPigResourceGeneratorCLI generates resources.
  */
 public class StreamsPigResourceGeneratorCLITest {
 
-    @Test
-    public void testStreamsPigResourceGeneratorCLI() throws Exception {
+  @Test
+  public void testStreamsPigResourceGeneratorCLI() throws Exception {
 
-        String sourceDirectory = "target/test-classes/activitystreams-schemas";
-        String targetDirectory = "target/generated-resources/pig-cli";
+    String sourceDirectory = "target/test-classes/activitystreams-schemas";
+    String targetDirectory = "target/generated-resources/pig-cli";
 
-        List<String> argsList = Lists.newArrayList(sourceDirectory, targetDirectory);
-        StreamsPigResourceGenerator.main(argsList.toArray(new String[0]));
+    List<String> argsList = Lists.newArrayList(sourceDirectory, targetDirectory);
+    StreamsPigResourceGenerator.main(argsList.toArray(new String[0]));
 
-        File testOutput = new File(targetDirectory);
+    File testOutput = new File(targetDirectory);
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(pigFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 133 );
-    }
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(pigFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 133 );
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorMojoIT.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorMojoIT.java b/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorMojoIT.java
index a584774..5cd612f 100644
--- a/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorMojoIT.java
+++ b/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorMojoIT.java
@@ -36,48 +36,47 @@ import java.util.List;
 import static org.apache.streams.plugins.pig.test.StreamsPigResourceGeneratorTest.pigFilter;
 
 /**
- * Tests that streams-plugin-hive running via maven generates hql resources
+ * Tests that streams-plugin-pig running via maven generates pig resources.
  */
 public class StreamsPigResourceGeneratorMojoIT extends TestCase {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsPigResourceGeneratorMojoIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsPigResourceGeneratorMojoIT.class);
 
-    protected void setUp() throws Exception
-    {
-        // required for mojo lookups to work
-        super.setUp();
-    }
+  protected void setUp() throws Exception {
+    // required for mojo lookups to work
+    super.setUp();
+  }
 
 
-    @Test
-    public void testStreamsPigResourceGeneratorMojo() throws Exception {
+  @Test
+  public void testStreamsPigResourceGeneratorMojo() throws Exception {
 
-        File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-pig" );
+    File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-pig" );
 
-        Verifier verifier;
+    Verifier verifier;
 
-        verifier = new Verifier( testDir.getAbsolutePath() );
+    verifier = new Verifier( testDir.getAbsolutePath() );
 
-        List cliOptions = new ArrayList();
-        cliOptions.add( "-N" );
-        verifier.executeGoals( Lists.<String>newArrayList(
-                "clean",
-                "dependency:unpack-dependencies",
-                "generate-resources"));
+    List cliOptions = new ArrayList();
+    cliOptions.add( "-N" );
+    verifier.executeGoals( Lists.<String>newArrayList(
+        "clean",
+        "dependency:unpack-dependencies",
+        "generate-resources"));
 
-        verifier.verifyErrorFreeLog();
+    verifier.verifyErrorFreeLog();
 
-        verifier.resetStreams();
+    verifier.resetStreams();
 
-        File testOutput = new File(testDir.getAbsolutePath() + "/target/generated-resources/pig-mojo");
+    File testOutput = new File(testDir.getAbsolutePath() + "/target/generated-resources/pig-mojo");
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(pigFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 133 );
-    }
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(pigFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 133 );
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorTest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorTest.java b/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorTest.java
index 82284dc..a51778f 100644
--- a/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorTest.java
+++ b/streams-plugins/streams-plugin-pig/src/test/java/org/apache/streams/plugins/pig/test/StreamsPigResourceGeneratorTest.java
@@ -19,102 +19,100 @@
 
 package org.apache.streams.plugins.pig.test;
 
+import org.apache.streams.plugins.pig.StreamsPigGenerationConfig;
+import org.apache.streams.plugins.pig.StreamsPigResourceGenerator;
+
 import com.google.common.base.Predicate;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.common.io.Files;
-import org.apache.commons.io.FileUtils;
-import org.apache.streams.plugins.pig.StreamsPigGenerationConfig;
-import org.apache.streams.plugins.pig.StreamsPigResourceGenerator;
-import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nullable;
 import java.io.File;
 import java.util.Collection;
-import java.util.Iterator;
-
-import static org.apache.streams.util.schema.FileUtil.dropSourcePathPrefix;
+import javax.annotation.Nullable;
 
 /**
  * Test that Activity beans are compatible with the example activities in the spec.
  */
 public class StreamsPigResourceGeneratorTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsPigResourceGeneratorTest.class);
-
-    public static final Predicate<File> pigFilter = new Predicate<File>() {
-        @Override
-        public boolean apply(@Nullable File file) {
-            if( file.getName().endsWith(".pig") )
-                return true;
-            else return false;
-        }
-    };
-
-    /**
-     * Tests that all example activities can be loaded into Activity beans
-     *
-     * @throws Exception
-     */
-    @Test
-    public void StreamsPigResourceGenerator() throws Exception {
-
-        StreamsPigGenerationConfig config = new StreamsPigGenerationConfig();
-
-        String sourceDirectory = "target/test-classes/activitystreams-schemas";
-
-        config.setSourceDirectory(sourceDirectory);
-
-        config.setTargetDirectory("target/generated-resources/pig");
-
-        config.setExclusions(Sets.newHashSet("attachments"));
-
-        config.setMaxDepth(2);
-
-        StreamsPigResourceGenerator streamsPigResourceGenerator = new StreamsPigResourceGenerator(config);
-        streamsPigResourceGenerator.run();
-
-        File testOutput = config.getTargetDirectory();
-
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
-
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(pigFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 133 );
-
-        // TODO: figure out how to do a match to a test resources that has an apache header.
-//        String expectedDirectory = "target/test-classes/expected";
-//        File testExpected = new File( expectedDirectory );
-//
-//        Iterable<File> expectedIterator = Files.fileTreeTraverser().breadthFirstTraversal(testExpected)
-//                .filter(pigFilter);
-//        Collection<File> expectedCollection = Lists.newArrayList(expectedIterator);
-//
-//        int fails = 0;
-//
-//        Iterator<File> iterator = expectedCollection.iterator();
-//        while( iterator.hasNext() ) {
-//            File objectExpected = iterator.next();
-//            String expectedEnd = dropSourcePathPrefix(objectExpected.getAbsolutePath(),  expectedDirectory);
-//            File objectActual = new File(config.getTargetDirectory() + "/" + expectedEnd);
-//            LOGGER.info("Comparing: {} and {}", objectExpected.getAbsolutePath(), objectActual.getAbsolutePath());
-//            assert( objectActual.exists());
-//            if( FileUtils(objectActual, objectExpected) == true ) {
-//                LOGGER.info("Exact Match!");
-//            } else {
-//                LOGGER.info("No Match!");
-//                fails++;
-//            }
-//        }
-//        if( fails > 0 ) {
-//            LOGGER.info("Fails: {}", fails);
-//            Assert.fail();
-//        }
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsPigResourceGeneratorTest.class);
+
+  public static final Predicate<File> pigFilter = new Predicate<File>() {
+    @Override
+    public boolean apply(@Nullable File file) {
+      if ( file.getName().endsWith(".pig") ) {
+        return true;
+      } else {
+        return false;
+      }
     }
+  };
+
+  /**
+   * Tests that StreamsPigResourceGenerator via SDK generates pig resources.
+   *
+   * @throws Exception Exception
+   */
+  @Test
+  public void testStreamsPigResourceGenerator() throws Exception {
+
+    StreamsPigGenerationConfig config = new StreamsPigGenerationConfig();
+
+    String sourceDirectory = "target/test-classes/activitystreams-schemas";
+
+    config.setSourceDirectory(sourceDirectory);
+
+    config.setTargetDirectory("target/generated-resources/pig");
+
+    config.setExclusions(Sets.newHashSet("attachments"));
+
+    config.setMaxDepth(2);
+
+    StreamsPigResourceGenerator streamsPigResourceGenerator = new StreamsPigResourceGenerator(config);
+    streamsPigResourceGenerator.run();
+
+    File testOutput = config.getTargetDirectory();
+
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
+
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(pigFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 133 );
+
+    // TODO: figure out how to do a match to a test resources that has an apache header.
+    //        String expectedDirectory = "target/test-classes/expected";
+    //        File testExpected = new File( expectedDirectory );
+    //
+    //        Iterable<File> expectedIterator = Files.fileTreeTraverser().breadthFirstTraversal(testExpected)
+    //                .filter(pigFilter);
+    //        Collection<File> expectedCollection = Lists.newArrayList(expectedIterator);
+    //
+    //        int fails = 0;
+    //
+    //        Iterator<File> iterator = expectedCollection.iterator();
+    //        while( iterator.hasNext() ) {
+    //            File objectExpected = iterator.next();
+    //            String expectedEnd = dropSourcePathPrefix(objectExpected.getAbsolutePath(),  expectedDirectory);
+    //            File objectActual = new File(config.getTargetDirectory() + "/" + expectedEnd);
+    //            LOGGER.info("Comparing: {} and {}", objectExpected.getAbsolutePath(), objectActual.getAbsolutePath());
+    //            assert( objectActual.exists());
+    //            if( FileUtils(objectActual, objectExpected) == true ) {
+    //                LOGGER.info("Exact Match!");
+    //            } else {
+    //                LOGGER.info("No Match!");
+    //                fails++;
+    //            }
+    //        }
+    //        if( fails > 0 ) {
+    //            LOGGER.info("Fails: {}", fails);
+    //            Assert.fail();
+    //        }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoGenerationConfig.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoGenerationConfig.java b/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoGenerationConfig.java
index b25cc9f..b2b156f 100644
--- a/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoGenerationConfig.java
+++ b/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoGenerationConfig.java
@@ -19,16 +19,10 @@
 
 package org.apache.streams.plugins;
 
-import org.jsonschema2pojo.AnnotationStyle;
-import org.jsonschema2pojo.Annotator;
 import org.jsonschema2pojo.DefaultGenerationConfig;
-import org.jsonschema2pojo.GenerationConfig;
-import org.jsonschema2pojo.SourceType;
-import org.jsonschema2pojo.rules.RuleFactory;
 import org.jsonschema2pojo.util.URLUtil;
 
 import java.io.File;
-import java.io.FileFilter;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -36,87 +30,88 @@ import java.util.Iterator;
 import java.util.List;
 
 /**
- * Created by sblackmon on 3/27/16.
+ * Configures StreamsPojoSourceGenerator.
  */
 public class StreamsPojoGenerationConfig extends DefaultGenerationConfig {
 
-    private String sourceDirectory;
-    private List<String> sourcePaths;
-    private String targetPackage;
-    private String targetDirectory;
-
-    public void setSourceDirectory(String sourceDirectory) {
-        this.sourceDirectory = sourceDirectory;
-    }
-
-    public void setSourcePaths(List<String> sourcePaths) {
-        this.sourcePaths = sourcePaths;
+  private String sourceDirectory;
+  private List<String> sourcePaths;
+  private String targetPackage;
+  private String targetDirectory;
+
+  public void setSourceDirectory(String sourceDirectory) {
+    this.sourceDirectory = sourceDirectory;
+  }
+
+  public void setSourcePaths(List<String> sourcePaths) {
+    this.sourcePaths = sourcePaths;
+  }
+
+  public void setTargetPackage(String targetPackage) {
+    this.targetPackage = targetPackage;
+  }
+
+  public void setTargetDirectory(String targetDirectory) {
+    this.targetDirectory = targetDirectory;
+  }
+
+  @Override
+  public String getTargetPackage() {
+    return targetPackage;
+  }
+
+  @Override
+  public File getTargetDirectory() {
+    return new File(targetDirectory);
+  }
+
+  @Override
+  public Iterator<URL> getSource() {
+    if (null != sourceDirectory) {
+      return Collections.singleton(URLUtil.parseURL(sourceDirectory)).iterator();
     }
-
-    public void setTargetPackage(String targetPackage) {
-        this.targetPackage = targetPackage;
+    List<URL> sourceUrls = new ArrayList<URL>();
+    if ( sourcePaths != null && sourcePaths.size() > 0) {
+      for (String source : sourcePaths) {
+        sourceUrls.add(URLUtil.parseURL(source));
+      }
     }
-
-    public void setTargetDirectory(String targetDirectory) {
-        this.targetDirectory = targetDirectory;
-    }
-
-    @Override
-    public String getTargetPackage() {
-        return targetPackage;
-    }
-
-    @Override
-    public File getTargetDirectory() {
-        return new File(targetDirectory);
-    }
-
-    @Override
-    public Iterator<URL> getSource() {
-        if (null != sourceDirectory) {
-            return Collections.singleton(URLUtil.parseURL(sourceDirectory)).iterator();
-        }
-        List<URL> sourceURLs = new ArrayList<URL>();
-        if( sourcePaths != null && sourcePaths.size() > 0)
-            for (String source : sourcePaths) {
-                sourceURLs.add(URLUtil.parseURL(source));
-            }
-        return sourceURLs.iterator();
-    }
-
-    @Override
-    public boolean isGenerateBuilders() {
-        return true;
-    }
-
-    @Override
-    public boolean isUseLongIntegers() {
-        return true;
-    }
-
-    @Override
-    public boolean isRemoveOldOutput() {
-        return true;
-    }
-
-    @Override
-    public boolean isUseJodaDates() {
-        return true;
-    }
-
-    @Override
-    public boolean isIncludeJsr303Annotations() {
-        return true;
-    }
-
-    @Override
-    public boolean isUseCommonsLang3() {
-        return true;
-    }
-
-//    @Override
-//    public boolean isIncludeAdditionalProperties() {
-//        return true;
-//    }
+    return sourceUrls.iterator();
+  }
+
+  @Override
+  public boolean isGenerateBuilders() {
+    return true;
+  }
+
+  @Override
+  public boolean isUseLongIntegers() {
+    return true;
+  }
+
+  @Override
+  public boolean isRemoveOldOutput() {
+    return true;
+  }
+
+  @Override
+  public boolean isUseJodaDates() {
+    return true;
+  }
+
+  @Override
+  public boolean isIncludeJsr303Annotations() {
+    return true;
+  }
+
+  @Override
+  public boolean isUseCommonsLang3() {
+    return true;
+  }
+
+  //    @Override
+  //    public boolean isIncludeAdditionalProperties() {
+  //        return true;
+  //    }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoSourceGenerator.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoSourceGenerator.java b/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoSourceGenerator.java
index 7115c60c..e5a516e 100644
--- a/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoSourceGenerator.java
+++ b/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoSourceGenerator.java
@@ -30,65 +30,85 @@ import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
 
 /**
- * Created by sblackmon on 4/20/16.
+ * Embed within your own java code
+ *
+ * <p/>
+ * StreamsPojoGenerationConfig config = new StreamsPojoGenerationConfig();
+ * config.setSourceDirectory("src/main/jsonschema");
+ * config.setTargetDirectory("target/generated-sources/pojo");
+ * StreamsPojoSourceGenerator generator = new StreamsPojoSourceGenerator(config);
+ * generator.run();
+ *
  */
 public class StreamsPojoSourceGenerator implements Runnable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsPojoSourceGenerator.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsPojoSourceGenerator.class);
 
-    private final static String LS = System.getProperty("line.separator");
+  private static final String LS = System.getProperty("line.separator");
 
-    private StreamsPojoGenerationConfig config;
+  private StreamsPojoGenerationConfig config;
 
-    public static void main(String[] args) {
-        StreamsPojoGenerationConfig config = new StreamsPojoGenerationConfig();
+  /**
+   * Run from CLI without Maven
+   *
+   * <p/>
+   * java -jar streams-plugin-pojo-jar-with-dependencies.jar StreamsPojoSourceGenerator src/main/jsonschema target/generated-sources
+   *
+   * @param args [sourceDirectory, targetDirectory, targetPackage]
+   * */
+  public static void main(String[] args) {
+    StreamsPojoGenerationConfig config = new StreamsPojoGenerationConfig();
 
-        String sourceDirectory = "src/main/jsonschema";
-        String targetDirectory = "target/generated-sources/pojo";
-        String targetPackage = "";
+    String sourceDirectory = "src/main/jsonschema";
+    String targetDirectory = "target/generated-sources/pojo";
+    String targetPackage = "";
 
-        if( args.length > 0 )
-            sourceDirectory = args[0];
-        if( args.length > 1 )
-            targetDirectory = args[1];
-        if( args.length > 2 )
-            targetPackage = args[2];
+    if ( args.length > 0 ) {
+      sourceDirectory = args[0];
+    }
+    if ( args.length > 1 ) {
+      targetDirectory = args[1];
+    }
+    if ( args.length > 2 ) {
+      targetPackage = args[2];
+    }
 
-        config.setSourceDirectory(sourceDirectory);
-        config.setTargetPackage(targetPackage);
-        config.setTargetDirectory(targetDirectory);
+    config.setSourceDirectory(sourceDirectory);
+    config.setTargetPackage(targetPackage);
+    config.setTargetDirectory(targetDirectory);
 
-        StreamsPojoSourceGenerator streamsPojoSourceGenerator = new StreamsPojoSourceGenerator(config);
-        streamsPojoSourceGenerator.run();
+    StreamsPojoSourceGenerator streamsPojoSourceGenerator = new StreamsPojoSourceGenerator(config);
+    streamsPojoSourceGenerator.run();
 
-        return;
-    }
+    return;
+  }
 
-    public StreamsPojoSourceGenerator(StreamsPojoGenerationConfig config) {
-        this.config = config;
-    }
+  public StreamsPojoSourceGenerator(StreamsPojoGenerationConfig config) {
+    this.config = config;
+  }
 
-    @Override
-    public void run() {
+  @Override
+  public void run() {
 
-        Preconditions.checkNotNull(config);
+    Preconditions.checkNotNull(config);
 
-        try {
-            Jsonschema2Pojo.generate(config);
-        } catch (Throwable e) {
-            LOGGER.error("{} {}", e.getClass(), e.getMessage());
-        }
+    try {
+      Jsonschema2Pojo.generate(config);
+    } catch (Throwable ex) {
+      LOGGER.error("{} {}", ex.getClass(), ex.getMessage());
     }
-
-    private void writeFile(String pojoFile, String pojoHive) {
-        try {
-            File path = new File(pojoFile);
-            File dir = path.getParentFile();
-            if( !dir.exists() )
-                dir.mkdirs();
-            Files.write(Paths.get(pojoFile), pojoHive.getBytes(), StandardOpenOption.CREATE_NEW);
-        } catch (Exception e) {
-            LOGGER.error("Write Exception: {}", e);
-        }
+  }
+
+  private void writeFile(String pojoFile, String pojoHive) {
+    try {
+      File path = new File(pojoFile);
+      File dir = path.getParentFile();
+      if ( !dir.exists() ) {
+        dir.mkdirs();
+      }
+      Files.write(Paths.get(pojoFile), pojoHive.getBytes(), StandardOpenOption.CREATE_NEW);
+    } catch (Exception ex) {
+      LOGGER.error("Write Exception: {}", ex);
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoSourceGeneratorMojo.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoSourceGeneratorMojo.java b/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoSourceGeneratorMojo.java
index c265bb6..7585e7b 100644
--- a/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoSourceGeneratorMojo.java
+++ b/streams-plugins/streams-plugin-pojo/src/main/java/org/apache/streams/plugins/StreamsPojoSourceGeneratorMojo.java
@@ -29,86 +29,81 @@ import org.apache.maven.plugins.annotations.LifecyclePhase;
 import org.apache.maven.plugins.annotations.Mojo;
 import org.apache.maven.plugins.annotations.Parameter;
 import org.apache.maven.project.MavenProject;
-import org.jsonschema2pojo.Jsonschema2Pojo;
-import org.jsonschema2pojo.maven.Jsonschema2PojoMojo;
 import org.jsonschema2pojo.maven.ProjectClasspath;
-import org.jsonschema2pojo.util.URLUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.File;
-import java.io.IOException;
-import java.net.URL;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.nio.file.StandardOpenOption;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
 import java.util.List;
 
-import static org.apache.commons.lang.StringUtils.isNotBlank;
-
-@Mojo(  name = "generate-sources",
-        defaultPhase = LifecyclePhase.GENERATE_SOURCES
-)
-@Execute(   goal = "generate-sources",
-            phase = LifecyclePhase.GENERATE_SOURCES
-)
+@Mojo (
+    name = "generate-sources",
+    defaultPhase = LifecyclePhase.GENERATE_SOURCES
+    )
+@Execute (
+    goal = "generate-sources",
+    phase = LifecyclePhase.GENERATE_SOURCES
+    )
 public class StreamsPojoSourceGeneratorMojo extends AbstractMojo {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsPojoSourceGeneratorMojo.class);
-
-    private volatile MojoFailureException mojoFailureException;
-
-    @Component
-    public MavenProject project;
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsPojoSourceGeneratorMojo.class);
 
-    @Parameter( defaultValue = "${project.basedir}", readonly = true )
-    public File basedir;
+  private volatile MojoFailureException mojoFailureException;
 
-    @Parameter( defaultValue = "./src/main/jsonschema", readonly = true ) // Maven 3 only
-    public String sourceDirectory;
+  @Component
+  public MavenProject project;
 
-    @Parameter( readonly = true ) // Maven 3 only
-    public List<String> sourcePaths;
+  @Parameter( defaultValue = "${project.basedir}", readonly = true )
+  public File basedir;
 
-    @Parameter(defaultValue = "./target/generated-sources/pojo", readonly = true)
-    public String targetDirectory;
+  @Parameter( defaultValue = "./src/main/jsonschema", readonly = true ) // Maven 3 only
+  public String sourceDirectory;
 
-    @Parameter(readonly = true)
-    public String targetPackage;
+  @Parameter( readonly = true ) // Maven 3 only
+  public List<String> sourcePaths;
 
-    public void execute() throws MojoExecutionException, MojoFailureException {
+  @Parameter(defaultValue = "./target/generated-sources/pojo", readonly = true)
+  public String targetDirectory;
 
-        addProjectDependenciesToClasspath();
+  @Parameter(readonly = true)
+  public String targetPackage;
 
-        StreamsPojoGenerationConfig config = new StreamsPojoGenerationConfig();
+  /**
+   * execute StreamsPojoSourceGenerator.
+   * @throws MojoExecutionException MojoExecutionException
+   * @throws MojoFailureException MojoFailureException
+   */
+  public void execute() throws MojoExecutionException, MojoFailureException {
 
-        if( sourcePaths != null && sourcePaths.size() > 0)
-            config.setSourcePaths(sourcePaths);
-        else
-            config.setSourceDirectory(sourceDirectory);
-        config.setTargetPackage(targetPackage);
-        config.setTargetDirectory(targetDirectory);
+    addProjectDependenciesToClasspath();
 
-        StreamsPojoSourceGenerator streamsPojoSourceGenerator = new StreamsPojoSourceGenerator(config);
-        streamsPojoSourceGenerator.run();
+    StreamsPojoGenerationConfig config = new StreamsPojoGenerationConfig();
 
+    if ( sourcePaths != null && sourcePaths.size() > 0) {
+      config.setSourcePaths(sourcePaths);
+    } else {
+      config.setSourceDirectory(sourceDirectory);
     }
+    config.setTargetPackage(targetPackage);
+    config.setTargetDirectory(targetDirectory);
 
-    private void addProjectDependenciesToClasspath() {
+    StreamsPojoSourceGenerator streamsPojoSourceGenerator = new StreamsPojoSourceGenerator(config);
+    streamsPojoSourceGenerator.run();
 
-        try {
+  }
 
-            ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
-            ClassLoader newClassLoader = new ProjectClasspath().getClassLoader(project, oldClassLoader, getLog());
-            Thread.currentThread().setContextClassLoader(newClassLoader);
+  private void addProjectDependenciesToClasspath() {
 
-        } catch (DependencyResolutionRequiredException e) {
-            LOGGER.info("Skipping addition of project artifacts, there appears to be a dependecy resolution problem", e);
-        }
+    try {
 
+      ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
+      ClassLoader newClassLoader = new ProjectClasspath().getClassLoader(project, oldClassLoader, getLog());
+      Thread.currentThread().setContextClassLoader(newClassLoader);
+
+    } catch (DependencyResolutionRequiredException ex) {
+      LOGGER.info("Skipping addition of project artifacts, there appears to be a dependency resolution problem", ex);
     }
 
+  }
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorCLITest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorCLITest.java b/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorCLITest.java
index bb41032..4239a31 100644
--- a/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorCLITest.java
+++ b/streams-plugins/streams-plugin-pojo/src/test/java/org/apache/streams/plugins/test/StreamsPojoSourceGeneratorCLITest.java
@@ -19,9 +19,10 @@
 
 package org.apache.streams.plugins.test;
 
+import org.apache.streams.plugins.StreamsPojoSourceGenerator;
+
 import com.google.common.collect.Lists;
 import com.google.common.io.Files;
-import org.apache.streams.plugins.StreamsPojoSourceGenerator;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -33,30 +34,30 @@ import java.util.List;
 import static org.apache.streams.plugins.test.StreamsPojoSourceGeneratorTest.javaFilter;
 
 /**
- * Created by sblackmon on 5/5/16.
+ * Test whether StreamsPojoSourceGeneratorCLI generates source files.
  */
 public class StreamsPojoSourceGeneratorCLITest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsPojoSourceGeneratorTest.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsPojoSourceGeneratorTest.class);
 
-    @Test
-    public void testStreamsPojoSourceGeneratorCLI() throws Exception {
+  @Test
+  public void testStreamsPojoSourceGeneratorCLI() throws Exception {
 
-        String sourceDirectory = "target/test-classes/activitystreams-schemas";
-        String targetDirectory = "target/generated-sources/test-cli";
+    String sourceDirectory = "target/test-classes/activitystreams-schemas";
+    String targetDirectory = "target/generated-sources/test-cli";
 
-        List<String> argsList = Lists.newArrayList(sourceDirectory, targetDirectory);
-        StreamsPojoSourceGenerator.main(argsList.toArray(new String[0]));
+    List<String> argsList = Lists.newArrayList(sourceDirectory, targetDirectory);
+    StreamsPojoSourceGenerator.main(argsList.toArray(new String[0]));
 
-        File testOutput = new File(targetDirectory);
+    File testOutput = new File(targetDirectory);
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(javaFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() > 133 );
-    }
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(javaFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() > 133 );
+  }
 }


[29/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailProvider.java b/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailProvider.java
index 5cc6fe7..e11628f 100644
--- a/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailProvider.java
+++ b/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailProvider.java
@@ -18,6 +18,13 @@
 
 package com.google.gmail.provider;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.DatumStatusCounter;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProvider;
+import org.apache.streams.core.StreamsResultSet;
+
 import com.google.common.base.Preconditions;
 import com.google.gmail.GMailConfiguration;
 import com.googlecode.gmail4j.GmailClient;
@@ -26,12 +33,7 @@ import com.googlecode.gmail4j.http.HttpGmailConnection;
 import com.googlecode.gmail4j.javamail.ImapGmailClient;
 import com.googlecode.gmail4j.javamail.ImapGmailConnection;
 import com.googlecode.gmail4j.rss.RssGmailClient;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.DatumStatusCounter;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProvider;
-import org.apache.streams.core.StreamsResultSet;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -50,141 +52,141 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 
 /**
- * Created by sblackmon on 12/10/13.
+ * GMailProvider collects messages from GMail.
  */
 public class GMailProvider implements StreamsProvider, Serializable {
 
-    public final static String STREAMS_ID = "GMailProvider";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(GMailProvider.class);
+  public final static String STREAMS_ID = "GMailProvider";
 
-    private GMailConfiguration config;
+  private final static Logger LOGGER = LoggerFactory.getLogger(GMailProvider.class);
 
-    private Class klass;
+  private GMailConfiguration config;
 
-    public GMailConfiguration getConfig() {
-        return config;
-    }
+  private Class klass;
 
-    public void setConfig(GMailConfiguration config) {
-        this.config = config;
-    }
+  public GMailConfiguration getConfig() {
+    return config;
+  }
 
-    protected BlockingQueue inQueue = new LinkedBlockingQueue<>(10000);
+  public void setConfig(GMailConfiguration config) {
+    this.config = config;
+  }
 
-    protected volatile Queue<StreamsDatum> providerQueue = new ConcurrentLinkedQueue<>();
-    protected Future task;
+  protected BlockingQueue inQueue = new LinkedBlockingQueue<>(10000);
 
-    public BlockingQueue<Object> getInQueue() {
-        return inQueue;
-    }
+  protected volatile Queue<StreamsDatum> providerQueue = new ConcurrentLinkedQueue<>();
+  protected Future task;
 
-    protected GmailClient rssClient;
-    protected ImapGmailClient imapClient;
+  public BlockingQueue<Object> getInQueue() {
+    return inQueue;
+  }
 
-    private ExecutorService executor;
+  protected GmailClient rssClient;
+  protected ImapGmailClient imapClient;
 
-    private static ExecutorService newFixedThreadPoolWithQueueSize(int nThreads, int queueSize) {
-        return new ThreadPoolExecutor(nThreads, nThreads,
-                5000L, TimeUnit.MILLISECONDS,
-                new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
-    }
+  private ExecutorService executor;
 
-    public GMailProvider() {
-        this.config = new ComponentConfigurator<>(GMailConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("gmail"));
-    }
+  private static ExecutorService newFixedThreadPoolWithQueueSize(int nThreads, int queueSize) {
+    return new ThreadPoolExecutor(nThreads, nThreads,
+        5000L, TimeUnit.MILLISECONDS,
+        new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
+  }
 
-    public GMailProvider(GMailConfiguration config) {
-        this.config = config;
-    }
+  public GMailProvider() {
+    this.config = new ComponentConfigurator<>(GMailConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("gmail"));
+  }
 
-    public GMailProvider(Class klass) {
-        this.config = new ComponentConfigurator<>(GMailConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("gmail"));
-        this.klass = klass;
-    }
+  public GMailProvider(GMailConfiguration config) {
+    this.config = config;
+  }
 
-    public GMailProvider(GMailConfiguration config, Class klass) {
-        this.config = config;
-        this.klass = klass;
-    }
+  public GMailProvider(Class klass) {
+    this.config = new ComponentConfigurator<>(GMailConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("gmail"));
+    this.klass = klass;
+  }
 
-    protected DatumStatusCounter countersTotal = new DatumStatusCounter();
-    protected DatumStatusCounter countersCurrent = new DatumStatusCounter();
+  public GMailProvider(GMailConfiguration config, Class klass) {
+    this.config = config;
+    this.klass = klass;
+  }
 
-    @Override
-    public String getId() {
-        return "GMailProvider";
-    }
+  protected DatumStatusCounter countersTotal = new DatumStatusCounter();
+  protected DatumStatusCounter countersCurrent = new DatumStatusCounter();
 
-    @Override
-    public void startStream() {
+  @Override
+  public String getId() {
+    return "GMailProvider";
+  }
 
-        task = executor.submit(new GMailImapProviderTask(this));
+  @Override
+  public void startStream() {
 
-    }
+    task = executor.submit(new GMailImapProviderTask(this));
 
-    @Override
-    public StreamsResultSet readCurrent() {
+  }
 
-        StreamsResultSet current;
+  @Override
+  public StreamsResultSet readCurrent() {
 
-        synchronized( GMailProvider.class ) {
-            current = new StreamsResultSet(new ConcurrentLinkedQueue<>(providerQueue));
-            current.setCounter(new DatumStatusCounter());
-            current.getCounter().add(countersCurrent);
-            countersTotal.add(countersCurrent);
-            countersCurrent = new DatumStatusCounter();
-            providerQueue.clear();
-        }
+    StreamsResultSet current;
 
-        return current;
+    synchronized( GMailProvider.class ) {
+      current = new StreamsResultSet(new ConcurrentLinkedQueue<>(providerQueue));
+      current.setCounter(new DatumStatusCounter());
+      current.getCounter().add(countersCurrent);
+      countersTotal.add(countersCurrent);
+      countersCurrent = new DatumStatusCounter();
+      providerQueue.clear();
     }
 
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return null;
-    }
+    return current;
+  }
 
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return null;
-    }
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return null;
+  }
 
-    @Override
-    public boolean isRunning() {
-        return !task.isDone() && !task.isCancelled();
-    }
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return null;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public boolean isRunning() {
+    return !task.isDone() && !task.isCancelled();
+  }
 
-        Preconditions.checkNotNull(this.klass);
+  @Override
+  public void prepare(Object configurationObject) {
 
-        Preconditions.checkNotNull(config.getUserName());
-        Preconditions.checkNotNull(config.getPassword());
+    Preconditions.checkNotNull(this.klass);
 
-        rssClient = new RssGmailClient();
-        GmailConnection rssConnection = new HttpGmailConnection(config.getUserName(), config.getPassword().toCharArray());
-        rssClient.setConnection(rssConnection);
+    Preconditions.checkNotNull(config.getUserName());
+    Preconditions.checkNotNull(config.getPassword());
 
-        imapClient = new ImapGmailClient();
-        GmailConnection imapConnection = new ImapGmailConnection();
-        imapConnection.setLoginCredentials(config.getUserName(), config.getPassword().toCharArray());
-        imapClient.setConnection(imapConnection);
+    rssClient = new RssGmailClient();
+    GmailConnection rssConnection = new HttpGmailConnection(config.getUserName(), config.getPassword().toCharArray());
+    rssClient.setConnection(rssConnection);
 
-        executor = Executors.newSingleThreadExecutor();
+    imapClient = new ImapGmailClient();
+    GmailConnection imapConnection = new ImapGmailConnection();
+    imapConnection.setLoginCredentials(config.getUserName(), config.getPassword().toCharArray());
+    imapClient.setConnection(imapConnection);
 
-        startStream();
-    }
+    executor = Executors.newSingleThreadExecutor();
+
+    startStream();
+  }
 
-    @Override
-    public void cleanUp() {
-        try {
-            executor.awaitTermination(5, TimeUnit.SECONDS);
-        } catch (InterruptedException e) {
-            e.printStackTrace();
-        }
+  @Override
+  public void cleanUp() {
+    try {
+      executor.awaitTermination(5, TimeUnit.SECONDS);
+    } catch (InterruptedException e) {
+      e.printStackTrace();
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailRssProviderTask.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailRssProviderTask.java b/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailRssProviderTask.java
deleted file mode 100644
index 6fbfd83..0000000
--- a/streams-contrib/streams-provider-google/google-gmail/src/main/java/com/google/gmail/provider/GMailRssProviderTask.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.google.gmail.provider;
-
-import com.googlecode.gmail4j.GmailMessage;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.util.ComponentUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.List;
-
-/**
- * Created by sblackmon on 12/10/13.
- */
-public class GMailRssProviderTask implements Runnable {
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(GMailRssProviderTask.class);
-
-    private GMailProvider provider;
-
-    public GMailRssProviderTask(GMailProvider provider) {
-        this.provider = provider;
-    }
-
-    @Override
-    public void run() {
-
-        final List<GmailMessage> messages = this.provider.rssClient.getUnreadMessages();
-        for (GmailMessage message : messages) {
-
-            StreamsDatum entry = new StreamsDatum(message);
-
-            ComponentUtils.offerUntilSuccess(entry, this.provider.providerQueue);
-        }
-
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gmail/src/test/java/com/google/gmail/test/GMailMessageSerDeTest.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gmail/src/test/java/com/google/gmail/test/GMailMessageSerDeTest.java b/streams-contrib/streams-provider-google/google-gmail/src/test/java/com/google/gmail/test/GMailMessageSerDeTest.java
index 13fa25a..2da9e82 100644
--- a/streams-contrib/streams-provider-google/google-gmail/src/test/java/com/google/gmail/test/GMailMessageSerDeTest.java
+++ b/streams-contrib/streams-provider-google/google-gmail/src/test/java/com/google/gmail/test/GMailMessageSerDeTest.java
@@ -20,6 +20,7 @@ package com.google.gmail.test;
 
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
+
 import org.junit.Assert;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -31,37 +32,37 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 
 /**
- * Tests conversion of gplus inputs to Activity
+ * Tests conversion of gmail inputs to Activity
  */
 @Ignore("ignore until test resources are available.")
 public class GMailMessageSerDeTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(GMailMessageSerDeTest.class);
+  private final static Logger LOGGER = LoggerFactory.getLogger(GMailMessageSerDeTest.class);
 
-    private ObjectMapper mapper = new ObjectMapper();
+  private ObjectMapper mapper = new ObjectMapper();
 
-    @Ignore
-    @Test
-    public void Tests()
-    {
-        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
-        mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
+  @Ignore
+  @Test
+  public void Tests()
+  {
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, Boolean.TRUE);
+    mapper.configure(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT, Boolean.TRUE);
 
-        InputStream is = GMailMessageSerDeTest.class.getResourceAsStream("/datasift_jsons.txt");
-        InputStreamReader isr = new InputStreamReader(is);
-        BufferedReader br = new BufferedReader(isr);
+    InputStream is = GMailMessageSerDeTest.class.getResourceAsStream("/datasift_jsons.txt");
+    InputStreamReader isr = new InputStreamReader(is);
+    BufferedReader br = new BufferedReader(isr);
 
-        try {
-            while (br.ready()) {
-                String line = br.readLine();
-                LOGGER.debug(line);
+    try {
+      while (br.ready()) {
+        String line = br.readLine();
+        LOGGER.debug(line);
 
-                // implement
-            }
-        } catch( Exception e ) {
-            e.printStackTrace();
-            Assert.fail();
-        }
+        // implement
+      }
+    } catch( Exception e ) {
+      e.printStackTrace();
+      Assert.fail();
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/processor/GooglePlusCommentProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/processor/GooglePlusCommentProcessor.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/processor/GooglePlusCommentProcessor.java
index d926541..833fe23 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/processor/GooglePlusCommentProcessor.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/processor/GooglePlusCommentProcessor.java
@@ -19,71 +19,78 @@
 
 package com.google.gplus.processor;
 
-import com.google.api.services.plus.model.Comment;
-import com.google.gplus.serializer.util.GooglePlusActivityUtil;
-import java.util.ArrayList;
-import java.util.List;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.pojo.json.Activity;
+
+import com.google.api.services.plus.model.Comment;
+import com.google.gplus.serializer.util.GooglePlusActivityUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * GooglePlusCommentProcessor collects comments about a google plus activity.
+ */
 public class GooglePlusCommentProcessor implements StreamsProcessor {
-    private final static String STREAMS_ID = "GooglePlusCommentProcessor";
-    private final static Logger LOGGER = LoggerFactory.getLogger(GooglePlusCommentProcessor.class);
-    private GooglePlusActivityUtil googlePlusActivityUtil;
-    private int count;
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        StreamsDatum result = null;
-
-        try {
-            Object item = entry.getDocument();
-            LOGGER.debug("{} processing {}", STREAMS_ID, item.getClass());
-
-            //Get G+ activity ID from our own activity ID
-            if (item instanceof Activity) {
-                Activity activity = (Activity) item;
-                String activityId = getGPlusID(activity.getId());
-
-                //Call Google Plus API to get list of comments for this activity ID
-                /* TODO: FILL ME OUT WITH THE API CALL **/
-                List<Comment> comments = new ArrayList<>();
-
-                googlePlusActivityUtil.updateActivity(comments, activity);
-                result = new StreamsDatum(activity);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-            LOGGER.error("Exception while converting Comment to Activity: {}", e.getMessage());
-        }
-
-        if( result != null )
-            return com.google.common.collect.Lists.newArrayList(result);
-        else
-            return new ArrayList<>();
-    }
+  private static final String STREAMS_ID = "GooglePlusCommentProcessor";
+  private static final Logger LOGGER = LoggerFactory.getLogger(GooglePlusCommentProcessor.class);
+  private GooglePlusActivityUtil googlePlusActivityUtil;
+  private int count;
 
-    @Override
-    public void prepare(Object configurationObject) {
-        googlePlusActivityUtil = new GooglePlusActivityUtil();
-        count = 0;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    StreamsDatum result = null;
+
+    try {
+      Object item = entry.getDocument();
+      LOGGER.debug("{} processing {}", STREAMS_ID, item.getClass());
 
-    @Override
-    public void cleanUp() {
+      //Get G+ activity ID from our own activity ID
+      if (item instanceof Activity) {
+        Activity activity = (Activity) item;
+        String activityId = getGPlusID(activity.getId());
 
+        //Call Google Plus API to get list of comments for this activity ID
+        /* TODO: FILL ME OUT WITH THE API CALL **/
+        List<Comment> comments = new ArrayList<>();
+
+        googlePlusActivityUtil.updateActivity(comments, activity);
+        result = new StreamsDatum(activity);
+      }
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      LOGGER.error("Exception while converting Comment to Activity: {}", ex.getMessage());
     }
 
-    private String getGPlusID(String activityID) {
-        String[] activityParts = activityID.split(":");
-        return (activityParts.length > 0) ? activityParts[activityParts.length - 1] : "";
+    if ( result != null ) {
+      return com.google.common.collect.Lists.newArrayList(result);
+    } else {
+      return new ArrayList<>();
     }
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    googlePlusActivityUtil = new GooglePlusActivityUtil();
+    count = 0;
+  }
+
+  @Override
+  public void cleanUp() {
+
+  }
+
+  private String getGPlusID(String activityId) {
+    String[] activityParts = activityId.split(":");
+    return (activityParts.length > 0) ? activityParts[activityParts.length - 1] : "";
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/processor/GooglePlusTypeConverter.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/processor/GooglePlusTypeConverter.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/processor/GooglePlusTypeConverter.java
index d44a487..fe4d5da 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/processor/GooglePlusTypeConverter.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/processor/GooglePlusTypeConverter.java
@@ -18,7 +18,11 @@
 
 package com.google.gplus.processor;
 
-import com.fasterxml.jackson.databind.JsonNode;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProcessor;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.pojo.json.Activity;
+
 import com.fasterxml.jackson.databind.module.SimpleModule;
 import com.google.api.services.plus.model.Person;
 import com.google.common.collect.Lists;
@@ -26,112 +30,112 @@ import com.google.gplus.serializer.util.GPlusActivityDeserializer;
 import com.google.gplus.serializer.util.GPlusEventClassifier;
 import com.google.gplus.serializer.util.GPlusPersonDeserializer;
 import com.google.gplus.serializer.util.GooglePlusActivityUtil;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProcessor;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.node.ObjectNode;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.List;
 import java.util.Queue;
 
+/**
+ * GooglePlusTypeConverter is a StreamsProcessor that converts gplus activities to activitystreams activities.
+ */
 public class GooglePlusTypeConverter implements StreamsProcessor {
-    public final static String STREAMS_ID = "GooglePlusTypeConverter";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(GooglePlusTypeConverter.class);
-    private StreamsJacksonMapper mapper;
-    private Queue<Person> inQueue;
-    private Queue<StreamsDatum> outQueue;
-    private GooglePlusActivityUtil googlePlusActivityUtil;
-    private int count = 0;
-
-    public GooglePlusTypeConverter() {}
-
-    public Queue<StreamsDatum> getProcessorOutputQueue() {
-        return outQueue;
+  public static final String STREAMS_ID = "GooglePlusTypeConverter";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(GooglePlusTypeConverter.class);
+  private StreamsJacksonMapper mapper;
+  private Queue<Person> inQueue;
+  private Queue<StreamsDatum> outQueue;
+  private GooglePlusActivityUtil googlePlusActivityUtil;
+  private int count = 0;
+
+  public GooglePlusTypeConverter() {}
+
+  public Queue<StreamsDatum> getProcessorOutputQueue() {
+    return outQueue;
+  }
+
+  public void setProcessorInputQueue(Queue<Person> inputQueue) {
+    inQueue = inputQueue;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
+    StreamsDatum result = null;
+
+    try {
+      Object item = entry.getDocument();
+
+      LOGGER.debug("{} processing {}", STREAMS_ID, item.getClass());
+      Activity activity = null;
+
+      if (item instanceof String) {
+        item = deserializeItem(item);
+      }
+
+      if (item instanceof Person) {
+        activity = new Activity();
+        googlePlusActivityUtil.updateActivity((Person)item, activity);
+      } else if (item instanceof com.google.api.services.plus.model.Activity) {
+        activity = new Activity();
+        googlePlusActivityUtil.updateActivity((com.google.api.services.plus.model.Activity)item, activity);
+      }
+
+      if (activity != null) {
+        result = new StreamsDatum(activity);
+        count++;
+      }
+    } catch (Exception ex) {
+      ex.printStackTrace();
+      LOGGER.error("Exception while converting Person to Activity: {}", ex.getMessage());
     }
 
-    public void setProcessorInputQueue(Queue<Person> inputQueue) {
-        inQueue = inputQueue;
+    if ( result != null ) {
+      return Lists.newArrayList(result);
+    } else {
+      return Lists.newArrayList();
     }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
-        StreamsDatum result = null;
-
-        try {
-            Object item = entry.getDocument();
-
-            LOGGER.debug("{} processing {}", STREAMS_ID, item.getClass());
-            Activity activity = null;
-
-            if(item instanceof String) {
-                item = deserializeItem(item);
-            }
-
-            if(item instanceof Person) {
-                activity = new Activity();
-                googlePlusActivityUtil.updateActivity((Person)item, activity);
-            } else if(item instanceof com.google.api.services.plus.model.Activity) {
-                activity = new Activity();
-                googlePlusActivityUtil.updateActivity((com.google.api.services.plus.model.Activity)item, activity);
-            }
-
-            if(activity != null) {
-                result = new StreamsDatum(activity);
-                count++;
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-            LOGGER.error("Exception while converting Person to Activity: {}", e.getMessage());
-        }
-
-        if( result != null )
-            return Lists.newArrayList(result);
-        else
-            return Lists.newArrayList();
+  }
+
+  private Object deserializeItem(Object item) {
+    try {
+      Class klass = GPlusEventClassifier.detectClass((String) item);
+
+      if (klass.equals(Person.class)) {
+        item = mapper.readValue((String) item, Person.class);
+      } else if (klass.equals(com.google.api.services.plus.model.Activity.class)) {
+        item = mapper.readValue((String) item, com.google.api.services.plus.model.Activity.class);
+      }
+    } catch (Exception ex) {
+      LOGGER.error("Exception while trying to deserializeItem: {}", ex);
     }
 
-    private Object deserializeItem(Object item) {
-        try {
-            Class klass = GPlusEventClassifier.detectClass((String) item);
+    return item;
+  }
 
-            if (klass.equals(Person.class)) {
-                item = mapper.readValue((String) item, Person.class);
-            } else if (klass.equals(com.google.api.services.plus.model.Activity.class)) {
-                item = mapper.readValue((String) item, com.google.api.services.plus.model.Activity.class);
-            }
-        } catch (Exception e) {
-            LOGGER.error("Exception while trying to deserializeItem: {}", e);
-        }
+  @Override
+  public void prepare(Object configurationObject) {
+    googlePlusActivityUtil = new GooglePlusActivityUtil();
+    mapper = StreamsJacksonMapper.getInstance();
 
-        return item;
-    }
-
-    @Override
-    public void prepare(Object configurationObject) {
-        googlePlusActivityUtil = new GooglePlusActivityUtil();
-        mapper = StreamsJacksonMapper.getInstance();
+    SimpleModule simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(Person.class, new GPlusPersonDeserializer());
+    mapper.registerModule(simpleModule);
 
-        SimpleModule simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(Person.class, new GPlusPersonDeserializer());
-        mapper.registerModule(simpleModule);
+    simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(com.google.api.services.plus.model.Activity.class, new GPlusActivityDeserializer());
+    mapper.registerModule(simpleModule);
+  }
 
-        simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(com.google.api.services.plus.model.Activity.class, new GPlusActivityDeserializer());
-        mapper.registerModule(simpleModule);
-    }
-
-    @Override
-    public void cleanUp() {
-        //No-op
-    }
+  @Override
+  public void cleanUp() {
+    //No-op
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/AbstractGPlusProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/AbstractGPlusProvider.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/AbstractGPlusProvider.java
index 734e711..e08c571 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/AbstractGPlusProvider.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/AbstractGPlusProvider.java
@@ -18,6 +18,17 @@
 
 package com.google.gplus.provider;
 
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProvider;
+import org.apache.streams.core.StreamsResultSet;
+import org.apache.streams.google.gplus.GPlusConfiguration;
+import org.apache.streams.google.gplus.configuration.UserInfo;
+import org.apache.streams.util.ComponentUtils;
+import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+import org.apache.streams.util.api.requests.backoff.impl.ExponentialBackOffStrategy;
+
 import com.google.api.client.googleapis.auth.oauth2.GoogleClientSecrets;
 import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
 import com.google.api.client.http.HttpTransport;
@@ -31,16 +42,7 @@ import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
 import com.google.gson.Gson;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsProvider;
-import org.apache.streams.core.StreamsResultSet;
-import org.apache.streams.google.gplus.GPlusConfiguration;
-import org.apache.streams.google.gplus.configuration.UserInfo;
-import org.apache.streams.util.ComponentUtils;
-import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
-import org.apache.streams.util.api.requests.backoff.impl.ExponentialBackOffStrategy;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -61,198 +63,202 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 /**
- * Provider that creates a GPlus client and will run task that queue data to an outing queue
+ * Provider that creates a GPlus client and will run tasks that queue data to an outgoing queue.
  */
 public abstract class AbstractGPlusProvider implements StreamsProvider {
 
-    public final static String STREAMS_ID = "AbstractGPlusProvider";
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(AbstractGPlusProvider.class);
-    private final static Set<String> SCOPE = new HashSet<String>() {{ add("https://www.googleapis.com/auth/plus.login");}};
-    private final static int MAX_BATCH_SIZE = 1000;
+  public static final String STREAMS_ID = "AbstractGPlusProvider";
 
-    private static final HttpTransport TRANSPORT = new NetHttpTransport();
-    private static final JacksonFactory JSON_FACTORY = new JacksonFactory();
-    private static final Gson GSON = new Gson();
-
-    private GPlusConfiguration config;
-
-    List<ListenableFuture<Object>> futures = new ArrayList<>();
-
-    private ListeningExecutorService executor;
-
-    private BlockingQueue<StreamsDatum> datumQueue;
-    private BlockingQueue<Runnable> runnables;
-    private AtomicBoolean isComplete;
-    private boolean previousPullWasEmpty;
-
-    protected GoogleClientSecrets clientSecrets;
-    protected GoogleCredential credential;
-    protected Plus plus;
-
-    public AbstractGPlusProvider() {
-        this.config = new ComponentConfigurator<>(GPlusConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("gplus"));
+  private static final Logger LOGGER = LoggerFactory.getLogger(AbstractGPlusProvider.class);
+  private static final Set<String> SCOPE = new HashSet<String>() {
+    {
+      add("https://www.googleapis.com/auth/plus.login");
     }
+  };
+  private static final int MAX_BATCH_SIZE = 1000;
 
-    public AbstractGPlusProvider(GPlusConfiguration config) {
-        this.config = config;
-    }
+  private static final HttpTransport TRANSPORT = new NetHttpTransport();
+  private static final JacksonFactory JSON_FACTORY = new JacksonFactory();
+  private static final Gson GSON = new Gson();
 
-    @Override
-    public void prepare(Object configurationObject) {
-
-        Preconditions.checkNotNull(config.getOauth().getPathToP12KeyFile());
-        Preconditions.checkNotNull(config.getOauth().getAppName());
-        Preconditions.checkNotNull(config.getOauth().getServiceAccountEmailAddress());
-
-        try {
-            this.plus = createPlusClient();
-        } catch (IOException|GeneralSecurityException e) {
-            LOGGER.error("Failed to created oauth for GPlus : {}", e);
-            throw new RuntimeException(e);
-        }
-        // GPlus rate limits you to 5 calls per second, so there is not a need to execute more than one
-        // collector unless you have multiple oauth tokens
-        //TODO make this configurable based on the number of oauth tokens
-        this.executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(1));
-        this.datumQueue = new LinkedBlockingQueue<>(1000);
-        this.isComplete = new AtomicBoolean(false);
-        this.previousPullWasEmpty = false;
-    }
+  private GPlusConfiguration config;
 
-    @Override
-    public void startStream() {
-
-        BackOffStrategy backOffStrategy = new ExponentialBackOffStrategy(2);
-        for(UserInfo user : this.config.getGooglePlusUsers()) {
-            if(this.config.getDefaultAfterDate() != null && user.getAfterDate() == null) {
-                user.setAfterDate(this.config.getDefaultAfterDate());
-            }
-            if(this.config.getDefaultBeforeDate() != null && user.getBeforeDate() == null) {
-                user.setBeforeDate(this.config.getDefaultBeforeDate());
-            }
-            this.executor.submit(getDataCollector(backOffStrategy, this.datumQueue, this.plus, user));
-        }
-        this.executor.shutdown();
-    }
+  List<ListenableFuture<Object>> futures = new ArrayList<>();
 
-    protected abstract Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, Plus plus, UserInfo userInfo);
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  private ListeningExecutorService executor;
 
-    @Override
-    public StreamsResultSet readCurrent() {
-        BlockingQueue<StreamsDatum> batch = new LinkedBlockingQueue<>();
-        int batchCount = 0;
-        while(!this.datumQueue.isEmpty() && batchCount < MAX_BATCH_SIZE) {
-            StreamsDatum datum = ComponentUtils.pollWhileNotEmpty(this.datumQueue);
-            if(datum != null) {
-                ++batchCount;
-                ComponentUtils.offerUntilSuccess(datum, batch);
-            }
-        }
-        boolean pullIsEmpty = batch.isEmpty() && this.datumQueue.isEmpty() &&this.executor.isTerminated();
-        this.isComplete.set(this.previousPullWasEmpty && pullIsEmpty);
-        this.previousPullWasEmpty = pullIsEmpty;
-        return new StreamsResultSet(batch);
-    }
+  private BlockingQueue<StreamsDatum> datumQueue;
+  private BlockingQueue<Runnable> runnables;
+  private AtomicBoolean isComplete;
+  private boolean previousPullWasEmpty;
 
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return null;
-    }
+  protected GoogleClientSecrets clientSecrets;
+  protected GoogleCredential credential;
+  protected Plus plus;
 
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return null;
-    }
+  public AbstractGPlusProvider() {
+    this.config = new ComponentConfigurator<>(GPlusConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("gplus"));
+  }
 
-    @VisibleForTesting
-    protected Plus createPlusClient() throws IOException, GeneralSecurityException {
-        credential = new GoogleCredential.Builder()
-                .setJsonFactory(JSON_FACTORY)
-                .setTransport(TRANSPORT)
-                .setServiceAccountScopes(SCOPE)
-                .setServiceAccountId(this.config.getOauth().getServiceAccountEmailAddress())
-                .setServiceAccountPrivateKeyFromP12File(new File(this.config.getOauth().getPathToP12KeyFile()))
-                .build();
-        return new Plus.Builder(TRANSPORT,JSON_FACTORY, credential).setApplicationName(this.config.getOauth().getAppName()).build();
-    }
+  public AbstractGPlusProvider(GPlusConfiguration config) {
+    this.config = config;
+  }
 
-    @Override
-    public void cleanUp() {
-        ComponentUtils.shutdownExecutor(this.executor, 10, 10);
-        this.executor = null;
-    }
+  @Override
+  public void prepare(Object configurationObject) {
 
-    public GPlusConfiguration getConfig() {
-        return config;
-    }
+    Preconditions.checkNotNull(config.getOauth().getPathToP12KeyFile());
+    Preconditions.checkNotNull(config.getOauth().getAppName());
+    Preconditions.checkNotNull(config.getOauth().getServiceAccountEmailAddress());
 
-    public void setConfig(GPlusConfiguration config) {
-        this.config = config;
+    try {
+      this.plus = createPlusClient();
+    } catch (IOException | GeneralSecurityException ex) {
+      LOGGER.error("Failed to created oauth for GPlus : {}", ex);
+      throw new RuntimeException(ex);
     }
-
-    /**
-     * Set and overwrite the default before date that was read from the configuration file.
-     * @param defaultBeforeDate
-     */
-    public void setDefaultBeforeDate(DateTime defaultBeforeDate) {
-        this.config.setDefaultBeforeDate(defaultBeforeDate);
+    // GPlus rate limits you to 5 calls per second, so there is not a need to execute more than one
+    // collector unless you have multiple oauth tokens
+    //TODO make this configurable based on the number of oauth tokens
+    this.executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(1));
+    this.datumQueue = new LinkedBlockingQueue<>(1000);
+    this.isComplete = new AtomicBoolean(false);
+    this.previousPullWasEmpty = false;
+  }
+
+  @Override
+  public void startStream() {
+
+    BackOffStrategy backOffStrategy = new ExponentialBackOffStrategy(2);
+    for (UserInfo user : this.config.getGooglePlusUsers()) {
+      if (this.config.getDefaultAfterDate() != null && user.getAfterDate() == null) {
+        user.setAfterDate(this.config.getDefaultAfterDate());
+      }
+      if (this.config.getDefaultBeforeDate() != null && user.getBeforeDate() == null) {
+        user.setBeforeDate(this.config.getDefaultBeforeDate());
+      }
+      this.executor.submit(getDataCollector(backOffStrategy, this.datumQueue, this.plus, user));
     }
-
-    /**
-     * Set and overwrite the default after date that was read from teh configuration file.
-     * @param defaultAfterDate
-     */
-    public void setDefaultAfterDate(DateTime defaultAfterDate) {
-        this.config.setDefaultAfterDate(defaultAfterDate);
+    this.executor.shutdown();
+  }
+
+  protected abstract Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, Plus plus, UserInfo userInfo);
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public StreamsResultSet readCurrent() {
+    BlockingQueue<StreamsDatum> batch = new LinkedBlockingQueue<>();
+    int batchCount = 0;
+    while (!this.datumQueue.isEmpty() && batchCount < MAX_BATCH_SIZE) {
+      StreamsDatum datum = ComponentUtils.pollWhileNotEmpty(this.datumQueue);
+      if (datum != null) {
+        ++batchCount;
+        ComponentUtils.offerUntilSuccess(datum, batch);
+      }
     }
-
-    /**
-     * Sets and overwrite the user info from the configuaration file.  Uses the defaults before and after dates.
-     * @param userIds
-     */
-    public void setUserInfoWithDefaultDates(Set<String> userIds) {
-        List<UserInfo> gPlusUsers = new LinkedList<>();
-        for(String userId : userIds) {
-            UserInfo user = new UserInfo();
-            user.setUserId(userId);
-            user.setAfterDate(this.config.getDefaultAfterDate());
-            user.setBeforeDate(this.config.getDefaultBeforeDate());
-            gPlusUsers.add(user);
-        }
-        this.config.setGooglePlusUsers(gPlusUsers);
+    boolean pullIsEmpty = batch.isEmpty() && this.datumQueue.isEmpty() && this.executor.isTerminated();
+    this.isComplete.set(this.previousPullWasEmpty && pullIsEmpty);
+    this.previousPullWasEmpty = pullIsEmpty;
+    return new StreamsResultSet(batch);
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return null;
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return null;
+  }
+
+  @VisibleForTesting
+  protected Plus createPlusClient() throws IOException, GeneralSecurityException {
+    credential = new GoogleCredential.Builder()
+        .setJsonFactory(JSON_FACTORY)
+        .setTransport(TRANSPORT)
+        .setServiceAccountScopes(SCOPE)
+        .setServiceAccountId(this.config.getOauth().getServiceAccountEmailAddress())
+        .setServiceAccountPrivateKeyFromP12File(new File(this.config.getOauth().getPathToP12KeyFile()))
+        .build();
+    return new Plus.Builder(TRANSPORT,JSON_FACTORY, credential).setApplicationName(this.config.getOauth().getAppName()).build();
+  }
+
+  @Override
+  public void cleanUp() {
+    ComponentUtils.shutdownExecutor(this.executor, 10, 10);
+    this.executor = null;
+  }
+
+  public GPlusConfiguration getConfig() {
+    return config;
+  }
+
+  public void setConfig(GPlusConfiguration config) {
+    this.config = config;
+  }
+
+  /**
+   * Set and overwrite the default before date that was read from the configuration file.
+   * @param defaultBeforeDate defaultBeforeDate
+   */
+  public void setDefaultBeforeDate(DateTime defaultBeforeDate) {
+    this.config.setDefaultBeforeDate(defaultBeforeDate);
+  }
+
+  /**
+   * Set and overwrite the default after date that was read from the configuration file.
+   * @param defaultAfterDate defaultAfterDate
+   */
+  public void setDefaultAfterDate(DateTime defaultAfterDate) {
+    this.config.setDefaultAfterDate(defaultAfterDate);
+  }
+
+  /**
+   * Sets and overwrites the user info from the configuration file.  Uses the default before and after dates.
+   * @param userIds userIds
+   */
+  public void setUserInfoWithDefaultDates(Set<String> userIds) {
+    List<UserInfo> gplusUsers = new LinkedList<>();
+    for (String userId : userIds) {
+      UserInfo user = new UserInfo();
+      user.setUserId(userId);
+      user.setAfterDate(this.config.getDefaultAfterDate());
+      user.setBeforeDate(this.config.getDefaultBeforeDate());
+      gplusUsers.add(user);
     }
-
-    /**
-     * Set and overwrite user into from teh configuration file. Only sets after dater.
-     * @param usersAndAfterDates
-     */
-    public void setUserInfoWithAfterDate(Map<String, DateTime> usersAndAfterDates) {
-        List<UserInfo> gPlusUsers = new LinkedList<>();
-        for(String userId : usersAndAfterDates.keySet()) {
-            UserInfo user = new UserInfo();
-            user.setUserId(userId);
-            user.setAfterDate(usersAndAfterDates.get(userId));
-            gPlusUsers.add(user);
-        }
-        this.config.setGooglePlusUsers(gPlusUsers);
+    this.config.setGooglePlusUsers(gplusUsers);
+  }
+
+  /**
+   * Set and overwrite user info from the configuration file. Only sets the after date.
+   * @param usersAndAfterDates usersAndAfterDates
+   */
+  public void setUserInfoWithAfterDate(Map<String, DateTime> usersAndAfterDates) {
+    List<UserInfo> gplusUsers = new LinkedList<>();
+    for (String userId : usersAndAfterDates.keySet()) {
+      UserInfo user = new UserInfo();
+      user.setUserId(userId);
+      user.setAfterDate(usersAndAfterDates.get(userId));
+      gplusUsers.add(user);
     }
-
-    @Override
-    public boolean isRunning() {
-       if (datumQueue.isEmpty() && executor.isTerminated() && Futures.allAsList(futures).isDone()) {
-            LOGGER.info("Completed");
-            isComplete.set(true);
-           LOGGER.info("Exiting");
-       }
-       return !isComplete.get();
+    this.config.setGooglePlusUsers(gplusUsers);
+  }
+
+  @Override
+  public boolean isRunning() {
+    if (datumQueue.isEmpty() && executor.isTerminated() && Futures.allAsList(futures).isDone()) {
+      LOGGER.info("Completed");
+      isComplete.set(true);
+      LOGGER.info("Exiting");
     }
+    return !isComplete.get();
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusActivitySerializer.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusActivitySerializer.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusActivitySerializer.java
index 4991e94..20f5002 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusActivitySerializer.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusActivitySerializer.java
@@ -18,50 +18,54 @@
 
 package com.google.gplus.provider;
 
-import com.google.gplus.serializer.util.GooglePlusActivityUtil;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.data.ActivitySerializer;
 import org.apache.streams.pojo.json.Activity;
+
+import com.google.gplus.serializer.util.GooglePlusActivityUtil;
+
+import org.apache.commons.lang.NotImplementedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.List;
 
-
+/**
+ * GPlusActivitySerializer converts gplus activities to as1 activities.
+ */
 public class GPlusActivitySerializer implements ActivitySerializer<com.google.api.services.plus.model.Activity> {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(GPlusActivitySerializer.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(GPlusActivitySerializer.class);
 
-    AbstractGPlusProvider provider;
+  AbstractGPlusProvider provider;
 
-    public GPlusActivitySerializer(AbstractGPlusProvider provider) {
+  public GPlusActivitySerializer(AbstractGPlusProvider provider) {
 
-        this.provider = provider;
-    }
+    this.provider = provider;
+  }
 
-    public GPlusActivitySerializer() {
-    }
+  public GPlusActivitySerializer() {
+  }
 
-    @Override
-    public String serializationFormat() {
-        return "gplus.v1";
-    }
+  @Override
+  public String serializationFormat() {
+    return "gplus.v1";
+  }
 
-    @Override
-    public com.google.api.services.plus.model.Activity serialize(Activity deserialized) {
-        throw new NotImplementedException("Not currently implemented");
-    }
+  @Override
+  public com.google.api.services.plus.model.Activity serialize(Activity deserialized) {
+    throw new NotImplementedException("Not currently implemented");
+  }
 
-    @Override
-    public Activity deserialize(com.google.api.services.plus.model.Activity gplusActivity) {
-        Activity activity = new Activity();
+  @Override
+  public Activity deserialize(com.google.api.services.plus.model.Activity gplusActivity) {
+    Activity activity = new Activity();
 
-        GooglePlusActivityUtil.updateActivity(gplusActivity, activity);
-        return activity;
-    }
+    GooglePlusActivityUtil.updateActivity(gplusActivity, activity);
+    return activity;
+  }
 
-    @Override
-    public List<Activity> deserializeAll(List<com.google.api.services.plus.model.Activity> serializedList) {
-        throw new NotImplementedException("Not currently implemented");
-    }
+  @Override
+  public List<Activity> deserializeAll(List<com.google.api.services.plus.model.Activity> serializedList) {
+    throw new NotImplementedException("Not currently implemented");
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusDataCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusDataCollector.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusDataCollector.java
index 5be2f9c..edbc663 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusDataCollector.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusDataCollector.java
@@ -18,51 +18,52 @@
 
 package com.google.gplus.provider;
 
-import com.google.api.client.googleapis.json.GoogleJsonResponseException;
 import org.apache.streams.util.api.requests.backoff.BackOffException;
 import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+
+import com.google.api.client.googleapis.json.GoogleJsonResponseException;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- *
+ * GPlusDataCollector collects GPlus Data on behalf of providers.
  */
 public abstract class GPlusDataCollector implements Runnable {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(GPlusDataCollector.class);
-
+  private static final Logger LOGGER = LoggerFactory.getLogger(GPlusDataCollector.class);
 
-    /**
-     * Looks at the status code of the expception.  If the code indicates that the request should be retried,
-     * it executes the back off strategy and returns true.
-     * @param gjre
-     * @param backOff
-     * @return returns true if the error code of the exception indicates the request should be retried.
-     */
-    public boolean backoffAndIdentifyIfRetry(GoogleJsonResponseException gjre, BackOffStrategy backOff) throws BackOffException {
-        boolean tryAgain = false;
-        switch (gjre.getStatusCode()) {
-            case 400 :
-                LOGGER.warn("Bad Request  : {}",  gjre);
-                break;
-            case 401 :
-                LOGGER.warn("Invalid Credentials : {}", gjre);
-            case 403 :
-                LOGGER.warn("Possible rate limit exception. Retrying. : {}", gjre.getMessage());
-                backOff.backOff();
-                tryAgain = true;
-                break;
-            case 503 :
-                LOGGER.warn("Google Backend Service Error : {}", gjre);
-                break;
-            default:
-                LOGGER.warn("Google Service returned error : {}", gjre);
-                tryAgain = true;
-                backOff.backOff();
-                break;
-        }
-        return tryAgain;
+  /**
+   * Looks at the status code of the exception.  If the code indicates that the request should be retried,
+   * it executes the back off strategy and returns true.
+   * @param gjre GoogleJsonResponseException
+   * @param backOff BackOffStrategy
+   * @return returns true if the error code of the exception indicates the request should be retried.
+   */
+  public boolean backoffAndIdentifyIfRetry(GoogleJsonResponseException gjre, BackOffStrategy backOff) throws BackOffException {
+    boolean tryAgain = false;
+    switch (gjre.getStatusCode()) {
+      case 400 :
+        LOGGER.warn("Bad Request  : {}",  gjre);
+        break;
+      case 401 :
+        LOGGER.warn("Invalid Credentials : {}", gjre);
+        break;
+      case 403 :
+        LOGGER.warn("Possible rate limit exception. Retrying. : {}", gjre.getMessage());
+        backOff.backOff();
+        tryAgain = true;
+        break;
+      case 503 :
+        LOGGER.warn("Google Backend Service Error : {}", gjre);
+        break;
+      default:
+        LOGGER.warn("Google Service returned error : {}", gjre);
+        tryAgain = true;
+        backOff.backOff();
+        break;
     }
-
-
+    return tryAgain;
+  }
+  
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusEventProcessor.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusEventProcessor.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusEventProcessor.java
deleted file mode 100644
index 1f1ee2f..0000000
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusEventProcessor.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.google.gplus.provider;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.pojo.json.Activity;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Objects;
-import java.util.Queue;
-import java.util.Random;
-import java.util.concurrent.BlockingQueue;
-
-public class GPlusEventProcessor implements Runnable {
-
-    private final static Logger LOGGER = LoggerFactory.getLogger(GPlusEventProcessor.class);
-
-    private ObjectMapper mapper = new ObjectMapper();
-
-    private BlockingQueue<String> inQueue;
-    private Queue<StreamsDatum> outQueue;
-
-    private Class outClass;
-
-    private GPlusActivitySerializer gPlusActivitySerializer = new GPlusActivitySerializer();
-
-    private final static String TERMINATE = "TERMINATE";
-
-    public GPlusEventProcessor(BlockingQueue<String> inQueue, Queue<StreamsDatum> outQueue, Class inClass, Class outClass) {
-        this.inQueue = inQueue;
-        this.outQueue = outQueue;
-        this.outClass = outClass;
-    }
-
-    public GPlusEventProcessor(BlockingQueue<String> inQueue, Queue<StreamsDatum> outQueue, Class outClass) {
-        this.inQueue = inQueue;
-        this.outQueue = outQueue;
-        this.outClass = outClass;
-    }
-
-    @Override
-    public void run() {
-
-        while(true) {
-            try {
-                String item = inQueue.take();
-                Thread.sleep(new Random().nextInt(100));
-                if(Objects.equals(item, TERMINATE)) {
-                    LOGGER.info("Terminating!");
-                    break;
-                }
-
-                // first check for valid json
-                ObjectNode node = (ObjectNode)mapper.readTree(item);
-
-                // if the target is string, just pass-through
-                if( String.class.equals(outClass))
-                    outQueue.offer(new StreamsDatum(item));
-                else {
-                    // convert to desired format
-                    com.google.api.services.plus.model.Activity gplusActivity = mapper.readValue(item, com.google.api.services.plus.model.Activity.class);
-
-                    Activity streamsActivity = gPlusActivitySerializer.deserialize(gplusActivity);
-
-                    outQueue.offer(new StreamsDatum(streamsActivity));
-                }
-
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserActivityCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserActivityCollector.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserActivityCollector.java
index f475e5d..5585bfc 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserActivityCollector.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserActivityCollector.java
@@ -18,6 +18,11 @@
 
 package com.google.gplus.provider;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.google.gplus.configuration.UserInfo;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+
 import com.fasterxml.jackson.core.JsonGenerationException;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.databind.DeserializationFeature;
@@ -30,10 +35,7 @@ import com.google.api.services.plus.Plus;
 import com.google.api.services.plus.model.Activity;
 import com.google.api.services.plus.model.ActivityFeed;
 import com.google.gplus.serializer.util.GPlusActivityDeserializer;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.google.gplus.configuration.UserInfo;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -46,90 +48,107 @@ import java.util.concurrent.BlockingQueue;
  */
 public class GPlusUserActivityCollector extends GPlusDataCollector {
 
-    /**
-     * Key for all public activities
-     * https://developers.google.com/+/api/latest/activities/list
-     */
-    private static final String PUBLIC_COLLECTION = "public";
-    /**
-     * Max results allowed per request
-     * https://developers.google.com/+/api/latest/activities/list
-     */
-    private static final long MAX_RESULTS = 100;
-    private static final int MAX_ATTEMPTS = 5;
-    private static final Logger LOGGER = LoggerFactory.getLogger(GPlusUserActivityCollector.class);
-    private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+  /**
+   * Key for all public activities
+   * https://developers.google.com/+/api/latest/activities/list
+   */
+  private static final String PUBLIC_COLLECTION = "public";
+  /**
+   * Max results allowed per request
+   * https://developers.google.com/+/api/latest/activities/list
+   */
+  private static final long MAX_RESULTS = 100;
+  private static final int MAX_ATTEMPTS = 5;
+  private static final Logger LOGGER = LoggerFactory.getLogger(GPlusUserActivityCollector.class);
+  private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
 
-    static { //set up mapper for Google Activity Object
-        SimpleModule simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(Activity.class, new GPlusActivityDeserializer());
-        simpleModule.addSerializer(com.google.api.client.util.DateTime.class, new StdSerializer<com.google.api.client.util.DateTime>(com.google.api.client.util.DateTime.class) {
-            @Override
-            public void serialize(com.google.api.client.util.DateTime dateTime, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException, JsonGenerationException {
-                jsonGenerator.writeString(dateTime.toStringRfc3339());
-            }
+  static { //set up mapper for Google Activity Object
+    SimpleModule simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(Activity.class, new GPlusActivityDeserializer());
+    simpleModule.addSerializer(
+        com.google.api.client.util.DateTime.class,
+        new StdSerializer<com.google.api.client.util.DateTime>(com.google.api.client.util.DateTime.class) {
+          @Override
+          public void serialize(
+              com.google.api.client.util.DateTime dateTime,
+              JsonGenerator jsonGenerator,
+              SerializerProvider serializerProvider)
+              throws IOException {
+            jsonGenerator.writeString(dateTime.toStringRfc3339());
+          }
         });
-        MAPPER.registerModule(simpleModule);
-        MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-    }
+    MAPPER.registerModule(simpleModule);
+    MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+  }
 
-    private BlockingQueue<StreamsDatum> datumQueue;
-    private BackOffStrategy backOff;
-    private Plus gPlus;
-    private UserInfo userInfo;
+  private BlockingQueue<StreamsDatum> datumQueue;
+  private BackOffStrategy backOff;
+  private Plus plus;
+  private UserInfo userInfo;
 
-    public GPlusUserActivityCollector(Plus gPlus, BlockingQueue<StreamsDatum> datumQueue, BackOffStrategy backOff, UserInfo userInfo) {
-        this.gPlus = gPlus;
-        this.datumQueue = datumQueue;
-        this.backOff = backOff;
-        this.userInfo = userInfo;
-    }
+  /**
+   * GPlusUserActivityCollector constructor.
+   * @param plus Plus
+   * @param datumQueue BlockingQueue of StreamsDatum
+   * @param backOff BackOffStrategy
+   * @param userInfo UserInfo
+   */
+  public GPlusUserActivityCollector(Plus plus, BlockingQueue<StreamsDatum> datumQueue, BackOffStrategy backOff, UserInfo userInfo) {
+    this.plus = plus;
+    this.datumQueue = datumQueue;
+    this.backOff = backOff;
+    this.userInfo = userInfo;
+  }
 
-    @Override
-    public void run() {
-        collectActivityData();
-    }
+  @Override
+  public void run() {
+    collectActivityData();
+  }
 
-    protected void collectActivityData() {
+  protected void collectActivityData() {
+    try {
+      ActivityFeed feed = null;
+      boolean tryAgain = false;
+      int attempt = 0;
+      DateTime afterDate = userInfo.getAfterDate();
+      DateTime beforeDate = userInfo.getBeforeDate();
+      do {
         try {
-            ActivityFeed feed = null;
-            boolean tryAgain = false;
-            int attempt = 0;
-            DateTime afterDate = userInfo.getAfterDate();
-            DateTime beforeDate = userInfo.getBeforeDate();
-            do {
-                try {
-                    if(feed == null) {
-                        feed = this.gPlus.activities().list(this.userInfo.getUserId(), PUBLIC_COLLECTION).setMaxResults(MAX_RESULTS).execute();
-                    } else {
-                        feed = this.gPlus.activities().list(this.userInfo.getUserId(), PUBLIC_COLLECTION).setMaxResults(MAX_RESULTS).setPageToken(feed.getNextPageToken()).execute();
-                    }
-                    this.backOff.reset(); //successful pull reset api.
-                    for(com.google.api.services.plus.model.Activity activity : feed.getItems()) {
-                        DateTime published = new DateTime(activity.getPublished().getValue());
-                        if(        (afterDate == null && beforeDate == null)
-                                || (beforeDate == null && afterDate.isBefore(published))
-                                || (afterDate == null && beforeDate.isAfter(published))
-                                || ((afterDate != null && beforeDate != null) && (afterDate.isBefore(published) && beforeDate.isAfter(published)))) {
-                            String json = MAPPER.writeValueAsString(activity);
-                            this.datumQueue.put(new StreamsDatum(json, activity.getId()));
-                        } else if(afterDate != null && afterDate.isAfter(published)) {
-                            feed.setNextPageToken(null); // do not fetch next page
-                            break;
-                        }
-                    }
-                } catch (GoogleJsonResponseException gjre) {
-                    tryAgain = backoffAndIdentifyIfRetry(gjre, this.backOff);
-                    ++attempt;
-                }
-            } while((tryAgain || (feed != null && feed.getNextPageToken() != null)) && attempt < MAX_ATTEMPTS);
-        } catch (Throwable t) {
-            if(t instanceof InterruptedException) {
-                Thread.currentThread().interrupt();
+          if (feed == null) {
+            feed = this.plus.activities().list(this.userInfo.getUserId(), PUBLIC_COLLECTION)
+                .setMaxResults(MAX_RESULTS).execute();
+          } else {
+            feed = this.plus.activities().list(this.userInfo.getUserId(), PUBLIC_COLLECTION)
+                .setMaxResults(MAX_RESULTS)
+                .setPageToken(feed.getNextPageToken()).execute();
+          }
+          this.backOff.reset(); //successful pull reset api.
+          for (com.google.api.services.plus.model.Activity activity : feed.getItems()) {
+            DateTime published = new DateTime(activity.getPublished().getValue());
+            if ((afterDate == null && beforeDate == null)
+                || (beforeDate == null && afterDate.isBefore(published))
+                || (afterDate == null && beforeDate.isAfter(published))
+                || ((afterDate != null && beforeDate != null) && (afterDate.isBefore(published) && beforeDate.isAfter(published)))) {
+              String json = MAPPER.writeValueAsString(activity);
+              this.datumQueue.put(new StreamsDatum(json, activity.getId()));
+            } else if (afterDate != null && afterDate.isAfter(published)) {
+              feed.setNextPageToken(null); // do not fetch next page
+              break;
             }
-            t.printStackTrace();
-            LOGGER.warn("Unable to pull Activities for user={} : {}",this.userInfo.getUserId(), t);
+          }
+        } catch (GoogleJsonResponseException gjre) {
+          tryAgain = backoffAndIdentifyIfRetry(gjre, this.backOff);
+          ++attempt;
         }
+      }
+      while ((tryAgain || (feed != null && feed.getNextPageToken() != null)) && attempt < MAX_ATTEMPTS);
+    } catch (Throwable th) {
+      if (th instanceof InterruptedException) {
+        Thread.currentThread().interrupt();
+      }
+      th.printStackTrace();
+      LOGGER.warn("Unable to pull Activities for user={} : {}",this.userInfo.getUserId(), th);
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserActivityProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserActivityProvider.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserActivityProvider.java
index e6b2223..97b08fd 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserActivityProvider.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserActivityProvider.java
@@ -18,13 +18,6 @@
 
 package com.google.gplus.provider;
 
-import com.google.api.services.plus.Plus;
-import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.Uninterruptibles;
-import com.google.gson.Gson;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigParseOptions;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.config.StreamsConfigurator;
@@ -33,6 +26,14 @@ import org.apache.streams.google.gplus.GPlusConfiguration;
 import org.apache.streams.google.gplus.configuration.UserInfo;
 import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
 
+import com.google.api.services.plus.Plus;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.Uninterruptibles;
+import com.google.gson.Gson;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+
 import java.io.BufferedOutputStream;
 import java.io.File;
 import java.io.FileOutputStream;
@@ -43,76 +44,88 @@ import java.util.concurrent.TimeUnit;
 /**
  *  Retrieve recent activity from a list of accounts.
  *
+ *  <p>
  *  To use from command line:
  *
+ *  <p>
  *  Supply (at least) the following required configuration in application.conf:
  *
+ *  <p>
  *  gplus.oauth.pathToP12KeyFile
  *  gplus.oauth.serviceAccountEmailAddress
  *  gplus.apiKey
  *  gplus.googlePlusUsers
  *
+ *  <p>
  *  Launch using:
  *
+ *  <p>
  *  mvn exec:java -Dexec.mainClass=com.google.gplus.provider.GPlusUserActivityProvider -Dexec.args="application.conf activity.json"
  */
-public class GPlusUserActivityProvider extends AbstractGPlusProvider{
-
-    private final static String STREAMS_ID = "GPlusUserActivityProvider";
-
-    public GPlusUserActivityProvider() {
-        super();
-    }
-
-    public GPlusUserActivityProvider(GPlusConfiguration config) {
-        super(config);
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    @Override
-    protected Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, Plus plus, UserInfo userInfo) {
-        return new GPlusUserActivityCollector(plus, queue, strategy, userInfo);
-    }
-
-    public static void main(String[] args) throws Exception {
-
-        Preconditions.checkArgument(args.length >= 2);
-
-        String configfile = args[0];
-        String outfile = args[1];
-
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config testResourceConfig = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
-
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        GPlusConfiguration config = new ComponentConfigurator<>(GPlusConfiguration.class).detectConfiguration(typesafe, "gplus");
-        GPlusUserActivityProvider provider = new GPlusUserActivityProvider(config);
-
-        Gson gson = new Gson();
-
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            for (StreamsDatum datum : provider.readCurrent()) {
-                String json;
-                if (datum.getDocument() instanceof String)
-                    json = (String) datum.getDocument();
-                else
-                    json = gson.toJson(datum.getDocument());
-                outStream.println(json);
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
+public class GPlusUserActivityProvider extends AbstractGPlusProvider {
+
+  private static final String STREAMS_ID = "GPlusUserActivityProvider";
+
+  public GPlusUserActivityProvider() {
+    super();
+  }
+
+  public GPlusUserActivityProvider(GPlusConfiguration config) {
+    super(config);
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  protected Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, Plus plus, UserInfo userInfo) {
+    return new GPlusUserActivityCollector(plus, queue, strategy, userInfo);
+  }
+
+  /**
+   * Retrieve recent activity from a list of accounts.
+   * @param args args
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    GPlusConfiguration config = new ComponentConfigurator<>(GPlusConfiguration.class).detectConfiguration(typesafe, "gplus");
+    GPlusUserActivityProvider provider = new GPlusUserActivityProvider(config);
+
+    Gson gson = new Gson();
+
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      for (StreamsDatum datum : provider.readCurrent()) {
+        String json;
+        if (datum.getDocument() instanceof String) {
+          json = (String) datum.getDocument();
+        } else {
+          json = gson.toJson(datum.getDocument());
+        }
+        outStream.println(json);
+      }
     }
+    while ( provider.isRunning());
+    provider.cleanUp();
+    outStream.flush();
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserDataCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserDataCollector.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserDataCollector.java
index 78a1649..3da3468 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserDataCollector.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserDataCollector.java
@@ -18,6 +18,11 @@
 
 package com.google.gplus.provider;
 
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.google.gplus.configuration.UserInfo;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.module.SimpleModule;
@@ -25,73 +30,77 @@ import com.google.api.client.googleapis.json.GoogleJsonResponseException;
 import com.google.api.services.plus.Plus;
 import com.google.api.services.plus.model.Person;
 import com.google.gplus.serializer.util.GPlusPersonDeserializer;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.google.gplus.configuration.UserInfo;
-import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.concurrent.BlockingQueue;
 
 /**
- * Collects user profile information for a specific GPlus user
+ * Collects user profile information for a specific GPlus user.
  */
 public  class GPlusUserDataCollector extends GPlusDataCollector {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(GPlusUserDataCollector.class);
-    private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
-    private static final int MAX_ATTEMPTS = 5;
+  private static final Logger LOGGER = LoggerFactory.getLogger(GPlusUserDataCollector.class);
+  private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+  private static final int MAX_ATTEMPTS = 5;
 
-    static { //set up Mapper for Person objects
-        SimpleModule simpleModule = new SimpleModule();
-        simpleModule.addDeserializer(Person.class, new GPlusPersonDeserializer());
-        MAPPER.registerModule(simpleModule);
-        MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
-    }
+  static { //set up Mapper for Person objects
+    SimpleModule simpleModule = new SimpleModule();
+    simpleModule.addDeserializer(Person.class, new GPlusPersonDeserializer());
+    MAPPER.registerModule(simpleModule);
+    MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+  }
 
-    private BackOffStrategy backOffStrategy;
-    private Plus gPlus;
-    private BlockingQueue<StreamsDatum> datumQueue;
-    private UserInfo userInfo;
+  private BackOffStrategy backOffStrategy;
+  private Plus plus;
+  private BlockingQueue<StreamsDatum> datumQueue;
+  private UserInfo userInfo;
 
+  /**
+   * GPlusUserDataCollector constructor.
+   * @param plus Plus
+   * @param backOffStrategy BackOffStrategy
+   * @param datumQueue BlockingQueue of StreamsDatum
+   * @param userInfo UserInfo
+   */
+  public GPlusUserDataCollector(Plus plus, BackOffStrategy backOffStrategy, BlockingQueue<StreamsDatum> datumQueue, UserInfo userInfo) {
+    this.plus = plus;
+    this.backOffStrategy = backOffStrategy;
+    this.datumQueue = datumQueue;
+    this.userInfo = userInfo;
+  }
 
-    public GPlusUserDataCollector(Plus gPlus, BackOffStrategy backOffStrategy, BlockingQueue<StreamsDatum> datumQueue, UserInfo userInfo) {
-        this.gPlus = gPlus;
-        this.backOffStrategy = backOffStrategy;
-        this.datumQueue = datumQueue;
-        this.userInfo = userInfo;
-    }
-
-    protected void queueUserHistory() {
+  protected void queueUserHistory() {
+    try {
+      boolean tryAgain = false;
+      int attempts = 0;
+      com.google.api.services.plus.model.Person person = null;
+      do {
         try {
-            boolean tryAgain = false;
-            int attempts = 0;
-            com.google.api.services.plus.model.Person person = null;
-            do {
-                try {
-                    person = this.gPlus.people().get(userInfo.getUserId()).execute();
-                    this.backOffStrategy.reset();
-                    tryAgain = person == null;
-                } catch (GoogleJsonResponseException gjre) {
-                    tryAgain = backoffAndIdentifyIfRetry(gjre, this.backOffStrategy);
-                }
-                ++attempts;
-            } while(tryAgain && attempts < MAX_ATTEMPTS);
-            String json = MAPPER.writeValueAsString(person);
-            this.datumQueue.put(new StreamsDatum(json, person.getId()));
-        } catch (Throwable t) {
-            LOGGER.warn("Unable to pull user data for user={} : {}", userInfo.getUserId(), t);
-            if(t instanceof InterruptedException) {
-                Thread.currentThread().interrupt();
-            }
+          person = this.plus.people().get(userInfo.getUserId()).execute();
+          this.backOffStrategy.reset();
+          tryAgain = person == null;
+        } catch (GoogleJsonResponseException gjre) {
+          tryAgain = backoffAndIdentifyIfRetry(gjre, this.backOffStrategy);
         }
+        ++attempts;
+      }
+      while (tryAgain && attempts < MAX_ATTEMPTS);
+      String json = MAPPER.writeValueAsString(person);
+      this.datumQueue.put(new StreamsDatum(json, person.getId()));
+    } catch (Throwable throwable) {
+      LOGGER.warn("Unable to pull user data for user={} : {}", userInfo.getUserId(), throwable);
+      if (throwable instanceof InterruptedException) {
+        Thread.currentThread().interrupt();
+      }
     }
+  }
 
-    @Override
-    public void run() {
-        queueUserHistory();
-    }
+  @Override
+  public void run() {
+    queueUserHistory();
+  }
 
 
 

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserDataProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserDataProvider.java b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserDataProvider.java
index 1541818..28bcb55 100644
--- a/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserDataProvider.java
+++ b/streams-contrib/streams-provider-google/google-gplus/src/main/java/com/google/gplus/provider/GPlusUserDataProvider.java
@@ -18,25 +18,22 @@
 
 package com.google.gplus.provider;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.api.services.plus.Plus;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.google.common.util.concurrent.Uninterruptibles;
-import com.google.gson.Gson;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigParseOptions;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.google.gplus.GPlusConfiguration;
 import org.apache.streams.google.gplus.configuration.UserInfo;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.util.api.requests.backoff.BackOffStrategy;
 
+import com.google.api.services.plus.Plus;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.Uninterruptibles;
+import com.google.gson.Gson;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+
 import java.io.BufferedOutputStream;
 import java.io.File;
 import java.io.FileOutputStream;
@@ -48,78 +45,90 @@ import java.util.concurrent.TimeUnit;
 /**
  *  Retrieve current profile status for a list of accounts.
  *
+ *  <p/>
  *  To use from command line:
  *
+ *  <p/>
  *  Supply (at least) the following required configuration in application.conf:
  *
+ *  <p/>
  *  gplus.oauth.pathToP12KeyFile
  *  gplus.oauth.serviceAccountEmailAddress
  *  gplus.apiKey
  *  gplus.googlePlusUsers
  *
+ *  <p/>
  *  Launch using:
  *
+ *  <p/>
  *  mvn exec:java -Dexec.mainClass=com.google.gplus.provider.GPlusUserDataProvider -Dexec.args="application.conf profiles.json"
  */
-public class GPlusUserDataProvider extends AbstractGPlusProvider{
-
-    public final static String STREAMS_ID = "GPlusUserDataProvider";
-
-    public GPlusUserDataProvider() {
-        super();
-    }
-
-    public GPlusUserDataProvider(GPlusConfiguration config) {
-        super(config);
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    @Override
-    protected Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, Plus plus, UserInfo userInfo) {
-        return new GPlusUserDataCollector(plus, strategy, queue, userInfo);
-    }
-
-    public static void main(String[] args) throws Exception {
-
-        Preconditions.checkArgument(args.length >= 2);
-
-        String configfile = args[0];
-        String outfile = args[1];
-
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config testResourceConfig = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-
-        Config typesafe  = testResourceConfig.withFallback(reference).resolve();
-
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        GPlusConfiguration config = new ComponentConfigurator<>(GPlusConfiguration.class).detectConfiguration(typesafe, "gplus");
-        GPlusUserDataProvider provider = new GPlusUserDataProvider(config);
-
-        Gson gson = new Gson();
-
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
-            while(iterator.hasNext()) {
-                StreamsDatum datum = iterator.next();
-                String json;
-                if (datum.getDocument() instanceof String)
-                    json = (String) datum.getDocument();
-                else
-                    json = gson.toJson(datum.getDocument());
-                outStream.println(json);
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
+public class GPlusUserDataProvider extends AbstractGPlusProvider {
+
+  public static final String STREAMS_ID = "GPlusUserDataProvider";
+
+  public GPlusUserDataProvider() {
+    super();
+  }
+
+  public GPlusUserDataProvider(GPlusConfiguration config) {
+    super(config);
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  protected Runnable getDataCollector(BackOffStrategy strategy, BlockingQueue<StreamsDatum> queue, Plus plus, UserInfo userInfo) {
+    return new GPlusUserDataCollector(plus, strategy, queue, userInfo);
+  }
+
+  /**
+   * Retrieve current profile status for a list of accounts.
+   * @param args args
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File file = new File(configfile);
+    assert (file.exists());
+    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = testResourceConfig.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    GPlusConfiguration config = new ComponentConfigurator<>(GPlusConfiguration.class).detectConfiguration(typesafe, "gplus");
+    GPlusUserDataProvider provider = new GPlusUserDataProvider(config);
+
+    Gson gson = new Gson();
+
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
+      while (iterator.hasNext()) {
+        StreamsDatum datum = iterator.next();
+        String json;
+        if (datum.getDocument() instanceof String) {
+          json = (String) datum.getDocument();
+        } else {
+          json = gson.toJson(datum.getDocument());
+        }
+        outStream.println(json);
+      }
     }
+    while ( provider.isRunning());
+    provider.cleanUp();
+    outStream.flush();
+  }
 }



[14/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraResourceGenerator.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraResourceGenerator.java b/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraResourceGenerator.java
index 7f8eb25..7afa9fc 100644
--- a/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraResourceGenerator.java
+++ b/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraResourceGenerator.java
@@ -16,8 +16,16 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.plugins.cassandra;
 
+import org.apache.streams.util.schema.FieldType;
+import org.apache.streams.util.schema.FieldUtil;
+import org.apache.streams.util.schema.GenerationConfig;
+import org.apache.streams.util.schema.Schema;
+import org.apache.streams.util.schema.SchemaStore;
+import org.apache.streams.util.schema.SchemaStoreImpl;
+
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.base.Joiner;
@@ -25,12 +33,6 @@ import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
-import org.apache.streams.util.schema.FieldType;
-import org.apache.streams.util.schema.FieldUtil;
-import org.apache.streams.util.schema.GenerationConfig;
-import org.apache.streams.util.schema.Schema;
-import org.apache.streams.util.schema.SchemaStore;
-import org.apache.streams.util.schema.SchemaStoreImpl;
 import org.jsonschema2pojo.util.URLUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -44,341 +46,369 @@ import java.util.List;
 import java.util.Map;
 
 import static com.google.common.base.Preconditions.checkNotNull;
-import static org.apache.commons.lang3.StringUtils.defaultString;
 import static org.apache.streams.util.schema.FileUtil.dropExtension;
 import static org.apache.streams.util.schema.FileUtil.dropSourcePathPrefix;
 import static org.apache.streams.util.schema.FileUtil.resolveRecursive;
 import static org.apache.streams.util.schema.FileUtil.writeFile;
 
 /**
- * Created by sblackmon on 5/3/16.
+ * Resource Generator for Cassandra.
  */
 public class StreamsCassandraResourceGenerator implements Runnable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsCassandraResourceGenerator.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsCassandraResourceGenerator.class);
 
-    private final static String LS = System.getProperty("line.separator");
+  private static final String LS = System.getProperty("line.separator");
 
-    private StreamsCassandraGenerationConfig config;
+  private StreamsCassandraGenerationConfig config;
 
-    private SchemaStore schemaStore = new SchemaStoreImpl();
+  private SchemaStore schemaStore = new SchemaStoreImpl();
 
-    private int currentDepth = 0;
+  private int currentDepth = 0;
 
-    public static void main(String[] args) {
-        StreamsCassandraGenerationConfig config = new StreamsCassandraGenerationConfig();
+  /**
+   * Run from CLI without Maven
+   *
+   * <p/>
+   * java -jar streams-plugin-cassandra-jar-with-dependencies.jar StreamsCassandraResourceGenerator src/main/jsonschema target/generated-resources
+   *
+   * @param args [sourceDirectory, targetDirectory]
+   */
+  public static void main(String[] args) {
+    StreamsCassandraGenerationConfig config = new StreamsCassandraGenerationConfig();
 
-        String sourceDirectory = "./src/main/jsonschema";
-        String targetDirectory = "./target/generated-resources/cassandra";
+    String sourceDirectory = "./src/main/jsonschema";
+    String targetDirectory = "./target/generated-resources/cassandra";
 
-        if( args.length > 0 )
-            sourceDirectory = args[0];
-        if( args.length > 1 )
-            targetDirectory = args[1];
-
-        config.setSourceDirectory(sourceDirectory);
-        config.setTargetDirectory(targetDirectory);
-
-        StreamsCassandraResourceGenerator streamsCassandraResourceGenerator = new StreamsCassandraResourceGenerator(config);
-        streamsCassandraResourceGenerator.run();
+    if ( args.length > 0 ) {
+      sourceDirectory = args[0];
     }
-
-    public StreamsCassandraResourceGenerator(StreamsCassandraGenerationConfig config) {
-        this.config = config;
+    if ( args.length > 1 ) {
+      targetDirectory = args[1];
     }
 
-    public void run() {
+    config.setSourceDirectory(sourceDirectory);
+    config.setTargetDirectory(targetDirectory);
 
-        checkNotNull(config);
+    StreamsCassandraResourceGenerator streamsCassandraResourceGenerator = new StreamsCassandraResourceGenerator(config);
+    streamsCassandraResourceGenerator.run();
+  }
 
-        generate(config);
+  public StreamsCassandraResourceGenerator(StreamsCassandraGenerationConfig config) {
+    this.config = config;
+  }
 
-    }
+  @Override
+  public void run() {
 
-    public void generate(StreamsCassandraGenerationConfig config) {
-
-        LinkedList<File> sourceFiles = new LinkedList<File>();
-
-        for (Iterator<URL> sources = config.getSource(); sources.hasNext();) {
-            URL source = sources.next();
-            sourceFiles.add(URLUtil.getFileFromURL(source));
-        }
+    checkNotNull(config);
 
-        LOGGER.info("Seeded with {} source paths:", sourceFiles.size());
+    generate(config);
 
-        resolveRecursive((GenerationConfig)config, sourceFiles);
+  }
 
-        LOGGER.info("Resolved {} schema files:", sourceFiles.size());
+  /**
+   * run generate using supplied StreamsCassandraGenerationConfig.
+   * @param config StreamsCassandraGenerationConfig
+   */
+  public void generate(StreamsCassandraGenerationConfig config) {
 
-        for (Iterator<File> iterator = sourceFiles.iterator(); iterator.hasNext();) {
-            File item = iterator.next();
-            schemaStore.create(item.toURI());
-        }
+    LinkedList<File> sourceFiles = new LinkedList<File>();
 
-        LOGGER.info("Identified {} objects:", schemaStore.getSize());
+    for (Iterator<URL> sources = config.getSource(); sources.hasNext();) {
+      URL source = sources.next();
+      sourceFiles.add(URLUtil.getFileFromURL(source));
+    }
 
-        String outputFile = config.getTargetDirectory() + "/" + "types.cql";
-        StringBuilder typesContent = new StringBuilder();
+    LOGGER.info("Seeded with {} source paths:", sourceFiles.size());
 
-        for (Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator(); schemaIterator.hasNext(); ) {
-            Schema schema = schemaIterator.next();
-            currentDepth = 0;
-            if( schema.getURI().getScheme().equals("file")) {
-                String inputFile = schema.getURI().getPath();
-                String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
-                for (String sourcePath : config.getSourcePaths()) {
-                    resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
-                }
+    resolveRecursive((GenerationConfig)config, sourceFiles);
 
-                String resourceId = schemaSymbol(schema);
+    LOGGER.info("Resolved {} schema files:", sourceFiles.size());
 
-                LOGGER.info("Processing {}", resourcePath);
+    for (Iterator<File> iterator = sourceFiles.iterator(); iterator.hasNext();) {
+      File item = iterator.next();
+      schemaStore.create(item.toURI());
+    }
 
-                String resourceContent = generateResource(schema, resourceId);
+    LOGGER.info("Identified {} objects:", schemaStore.getSize());
 
-                typesContent.append(resourceContent);
+    String outputFile = config.getTargetDirectory() + "/" + "types.cql";
+    StringBuilder typesContent = new StringBuilder();
 
-                LOGGER.info("Added {}", resourceId);
-            }
+    for (Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator(); schemaIterator.hasNext(); ) {
+      Schema schema = schemaIterator.next();
+      currentDepth = 0;
+      if ( schema.getUri().getScheme().equals("file")) {
+        String inputFile = schema.getUri().getPath();
+        String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
+        for (String sourcePath : config.getSourcePaths()) {
+          resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
         }
 
-        writeFile(outputFile, typesContent.toString());
-
-    }
+        String resourceId = schemaSymbol(schema);
 
-    public String generateResource(Schema schema, String resourceId) {
-        StringBuilder resourceBuilder = new StringBuilder();
-        resourceBuilder.append("CREATE TYPE ");
-        resourceBuilder.append(resourceId);
-        resourceBuilder.append(" IF NOT EXISTS (");
-        resourceBuilder.append(LS);
-        resourceBuilder = appendRootObject(resourceBuilder, schema, resourceId, ' ');
-        resourceBuilder.append(");");
-        resourceBuilder.append(LS);
-        return resourceBuilder.toString();
-    }
+        LOGGER.info("Processing {}", resourcePath);
 
-    public StringBuilder appendRootObject(StringBuilder builder, Schema schema, String resourceId, Character seperator) {
-        ObjectNode propertiesNode = schemaStore.resolveProperties(schema, null, resourceId);
-        if( propertiesNode.get("id") != null ) {
-            builder.append("id text PRIMARY KEY,");
-            builder.append(LS);
-            propertiesNode.remove("id");
-        }
-        if( propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0) {
-            builder = appendPropertiesNode(builder, schema, propertiesNode, seperator);
-        }
-        return builder;
-    }
+        String resourceContent = generateResource(schema, resourceId);
 
-    private StringBuilder appendValueField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
-        // safe to append nothing
-        checkNotNull(builder);
-        builder.append(cqlEscape(fieldId));
-        builder.append(seperator);
-        builder.append(cqlType(fieldType));
-        return builder;
-    }
+        typesContent.append(resourceContent);
 
-    public StringBuilder appendArrayItems(StringBuilder builder, Schema schema, String fieldId, ObjectNode itemsNode, Character seperator) {
-        // not safe to append nothing
-        checkNotNull(builder);
-        if( itemsNode == null ) return builder;
-        if( itemsNode.has("type")) {
-            try {
-                FieldType itemType = FieldUtil.determineFieldType(itemsNode);
-                switch( itemType ) {
-                    case OBJECT:
-                        Schema objectSchema = null;
-                        URI parentURI = null;
-                        if( itemsNode.has("$ref") || itemsNode.has("extends") ) {
-                            JsonNode refNode = itemsNode.get("$ref");
-                            JsonNode extendsNode = itemsNode.get("extends");
-                            if (refNode != null && refNode.isValueNode())
-                                parentURI = URI.create(refNode.asText());
-                            else if (extendsNode != null && extendsNode.isObject())
-                                parentURI = URI.create(extendsNode.get("$ref").asText());
-                            URI absoluteURI;
-                            if (parentURI.isAbsolute())
-                                absoluteURI = parentURI;
-                            else {
-                                absoluteURI = schema.getURI().resolve(parentURI);
-                                if (!absoluteURI.isAbsolute() || (absoluteURI.isAbsolute() && !schemaStore.getByUri(absoluteURI).isPresent() ))
-                                    absoluteURI = schema.getParentURI().resolve(parentURI);
-                            }
-                            if (absoluteURI != null && absoluteURI.isAbsolute()) {
-                                Optional<Schema> schemaLookup = schemaStore.getByUri(absoluteURI);
-                                if (schemaLookup.isPresent()) {
-                                    objectSchema = schemaLookup.get();
-                                }
-                            }
-                        }
-                        // have to resolve schema here
-
-                        builder = appendArrayObject(builder, objectSchema, fieldId, seperator);
-                        break;
-                    case ARRAY:
-                        ObjectNode subArrayItems = (ObjectNode) itemsNode.get("items");
-                        builder = appendArrayItems(builder, schema, fieldId, subArrayItems, seperator);
-                        break;
-                    default:
-                        builder = appendArrayField(builder, schema, fieldId, itemType, seperator);
-                }
-            } catch (Exception e) {
-                LOGGER.warn("No item type resolvable for {}", fieldId);
-            }
-        }
-        checkNotNull(builder);
-        return builder;
+        LOGGER.info("Added {}", resourceId);
+      }
     }
 
-    private StringBuilder appendArrayField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
-        // safe to append nothing
-        checkNotNull(builder);
-        checkNotNull(fieldId);
-        builder.append(cqlEscape(fieldId));
-        builder.append(seperator);
-        builder.append("list<"+cqlType(fieldType)+">");
-        checkNotNull(builder);
-        return builder;
+    writeFile(outputFile, typesContent.toString());
+
+  }
+
+  /**
+   * generateResource String from schema and resourceId.
+   * @param schema Schema
+   * @param resourceId String
+   * @return CREATE TYPE ...
+   */
+  public String generateResource(Schema schema, String resourceId) {
+    StringBuilder resourceBuilder = new StringBuilder();
+    resourceBuilder.append("CREATE TYPE ");
+    resourceBuilder.append(resourceId);
+    resourceBuilder.append(" IF NOT EXISTS (");
+    resourceBuilder.append(LS);
+    resourceBuilder = appendRootObject(resourceBuilder, schema, resourceId, ' ');
+    resourceBuilder.append(");");
+    resourceBuilder.append(LS);
+    return resourceBuilder.toString();
+  }
+
+  protected StringBuilder appendRootObject(StringBuilder builder, Schema schema, String resourceId, Character seperator) {
+    ObjectNode propertiesNode = schemaStore.resolveProperties(schema, null, resourceId);
+    if ( propertiesNode.get("id") != null ) {
+      builder.append("id text PRIMARY KEY,");
+      builder.append(LS);
+      propertiesNode.remove("id");
     }
-
-    private StringBuilder appendArrayObject(StringBuilder builder, Schema schema, String fieldId, Character seperator) {
-        // safe to append nothing
-        checkNotNull(builder);
-        String schemaSymbol = schemaSymbol(schema);
-        if( !Strings.isNullOrEmpty(fieldId) && schemaSymbol != null ) {
-            builder.append(cqlEscape(fieldId));
-            builder.append(seperator);
-            builder.append("list<" + schemaSymbol + ">");
-            builder.append(LS);
-        }
-        checkNotNull(builder);
-        return builder;
+    if ( propertiesNode != null && propertiesNode.isObject() && propertiesNode.size() > 0) {
+      builder = appendPropertiesNode(builder, schema, propertiesNode, seperator);
     }
-
-    private StringBuilder appendSchemaField(StringBuilder builder, Schema schema, String fieldId, Character seperator) {
-        // safe to append nothing
-        checkNotNull(builder);
-        String schemaSymbol = schemaSymbol(schema);
-        if( !Strings.isNullOrEmpty(fieldId) && schemaSymbol != null ) {
-            builder.append(cqlEscape(fieldId));
-            builder.append(seperator);
-            builder.append(schemaSymbol);
-        }
-        checkNotNull(builder);
-        return builder;
+    return builder;
+  }
+
+  private StringBuilder appendValueField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
+    // safe to append nothing
+    checkNotNull(builder);
+    builder.append(cqlEscape(fieldId));
+    builder.append(seperator);
+    builder.append(cqlType(fieldType));
+    return builder;
+  }
+
+  protected StringBuilder appendArrayItems(StringBuilder builder, Schema schema, String fieldId, ObjectNode itemsNode, Character seperator) {
+    // not safe to append nothing
+    checkNotNull(builder);
+    if ( itemsNode == null ) {
+      return builder;
     }
-
-    /*
-     can this be moved to streams-schemas if schemastore available in scope?
-     maybe an interface?
-     lot of boilerplate / reuse between plugins
-     however treatment is way different when resolving a type symbol vs resolving and listing fields .
-     */
-    private StringBuilder appendPropertiesNode(StringBuilder builder, Schema schema, ObjectNode propertiesNode, Character seperator) {
-        checkNotNull(builder);
-        checkNotNull(propertiesNode);
-        Iterator<Map.Entry<String, JsonNode>> fields = propertiesNode.fields();
-        Joiner joiner = Joiner.on(","+LS).skipNulls();
-        List<String> fieldStrings = Lists.newArrayList();
-        for( ; fields.hasNext(); ) {
-            Map.Entry<String, JsonNode> field = fields.next();
-            String fieldId = field.getKey();
-            if( !config.getExclusions().contains(fieldId) && field.getValue().isObject()) {
-                ObjectNode fieldNode = (ObjectNode) field.getValue();
-                FieldType fieldType = FieldUtil.determineFieldType(fieldNode);
-                if (fieldType != null ) {
-                    switch (fieldType) {
-                        case ARRAY:
-                            ObjectNode itemsNode = (ObjectNode) fieldNode.get("items");
-                            if( currentDepth <= config.getMaxDepth()) {
-                                StringBuilder arrayItemsBuilder = appendArrayItems(new StringBuilder(), schema, fieldId, itemsNode, seperator);
-                                if( !Strings.isNullOrEmpty(arrayItemsBuilder.toString())) {
-                                    fieldStrings.add(arrayItemsBuilder.toString());
-                                }
-                            }
-                            break;
-                        case OBJECT:
-                            Schema objectSchema = null;
-                            URI parentURI = null;
-                            if( fieldNode.has("$ref") || fieldNode.has("extends") ) {
-                                JsonNode refNode = fieldNode.get("$ref");
-                                JsonNode extendsNode = fieldNode.get("extends");
-                                if (refNode != null && refNode.isValueNode())
-                                    parentURI = URI.create(refNode.asText());
-                                else if (extendsNode != null && extendsNode.isObject())
-                                    parentURI = URI.create(extendsNode.get("$ref").asText());
-                                URI absoluteURI;
-                                if (parentURI.isAbsolute())
-                                    absoluteURI = parentURI;
-                                else {
-                                    absoluteURI = schema.getURI().resolve(parentURI);
-                                    if (!absoluteURI.isAbsolute() || (absoluteURI.isAbsolute() && !schemaStore.getByUri(absoluteURI).isPresent() ))
-                                        absoluteURI = schema.getParentURI().resolve(parentURI);
-                                }
-                                if (absoluteURI != null && absoluteURI.isAbsolute()) {
-                                    Optional<Schema> schemaLookup = schemaStore.getByUri(absoluteURI);
-                                    if (schemaLookup.isPresent()) {
-                                        objectSchema = schemaLookup.get();
-                                    }
-                                }
-                            }
-                            //ObjectNode childProperties = schemaStore.resolveProperties(schema, fieldNode, fieldId);
-                            if( currentDepth < config.getMaxDepth()) {
-                                StringBuilder structFieldBuilder = appendSchemaField(new StringBuilder(), objectSchema, fieldId, seperator);
-                                if( !Strings.isNullOrEmpty(structFieldBuilder.toString())) {
-                                    fieldStrings.add(structFieldBuilder.toString());
-                                }
-                            }
-                            break;
-                        default:
-                            StringBuilder valueFieldBuilder = appendValueField(new StringBuilder(), schema, fieldId, fieldType, seperator);
-                            if( !Strings.isNullOrEmpty(valueFieldBuilder.toString())) {
-                                fieldStrings.add(valueFieldBuilder.toString());
-                            }
-                    }
+    if ( itemsNode.has("type")) {
+      try {
+        FieldType itemType = FieldUtil.determineFieldType(itemsNode);
+        switch ( itemType ) {
+          case OBJECT:
+            Schema objectSchema = null;
+            URI parentUri = null;
+            if ( itemsNode.has("$ref") || itemsNode.has("extends") ) {
+              JsonNode refNode = itemsNode.get("$ref");
+              JsonNode extendsNode = itemsNode.get("extends");
+              if (refNode != null && refNode.isValueNode()) {
+                parentUri = URI.create(refNode.asText());
+              } else if (extendsNode != null && extendsNode.isObject()) {
+                parentUri = URI.create(extendsNode.get("$ref").asText());
+              }
+              URI absoluteUri;
+              if (parentUri.isAbsolute()) {
+                absoluteUri = parentUri;
+              } else {
+                absoluteUri = schema.getUri().resolve(parentUri);
+                if (!absoluteUri.isAbsolute() || (absoluteUri.isAbsolute() && !schemaStore.getByUri(absoluteUri).isPresent() )) {
+                  absoluteUri = schema.getParentUri().resolve(parentUri);
+                }
+              }
+              if (absoluteUri != null && absoluteUri.isAbsolute()) {
+                Optional<Schema> schemaLookup = schemaStore.getByUri(absoluteUri);
+                if (schemaLookup.isPresent()) {
+                  objectSchema = schemaLookup.get();
                 }
+              }
             }
+            // have to resolve schema here
+
+            builder = appendArrayObject(builder, objectSchema, fieldId, seperator);
+            break;
+          case ARRAY:
+            ObjectNode subArrayItems = (ObjectNode) itemsNode.get("items");
+            builder = appendArrayItems(builder, schema, fieldId, subArrayItems, seperator);
+            break;
+          default:
+            builder = appendArrayField(builder, schema, fieldId, itemType, seperator);
         }
-        builder.append(joiner.join(fieldStrings)).append(LS);
-        Preconditions.checkNotNull(builder);
-        return builder;
+      } catch (Exception ex) {
+        LOGGER.warn("No item type resolvable for {}", fieldId);
+      }
     }
-
-    private static String cqlEscape( String fieldId ) {
-        return "`"+fieldId+"`";
+    checkNotNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendArrayField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
+    // safe to append nothing
+    checkNotNull(builder);
+    checkNotNull(fieldId);
+    builder.append(cqlEscape(fieldId));
+    builder.append(seperator);
+    builder.append("list<" + cqlType(fieldType) + ">");
+    checkNotNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendArrayObject(StringBuilder builder, Schema schema, String fieldId, Character seperator) {
+    // safe to append nothing
+    checkNotNull(builder);
+    String schemaSymbol = schemaSymbol(schema);
+    if ( !Strings.isNullOrEmpty(fieldId) && schemaSymbol != null ) {
+      builder.append(cqlEscape(fieldId));
+      builder.append(seperator);
+      builder.append("list<" + schemaSymbol + ">");
+      builder.append(LS);
     }
-
-    private static String cqlType( FieldType fieldType ) {
-        switch( fieldType ) {
-            case STRING:
-                return "text";
-            case INTEGER:
-                return "int";
-            case NUMBER:
-                return "double";
-            case OBJECT:
-                return "tuple";
+    checkNotNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendSchemaField(StringBuilder builder, Schema schema, String fieldId, Character seperator) {
+    // safe to append nothing
+    checkNotNull(builder);
+    String schemaSymbol = schemaSymbol(schema);
+    if ( !Strings.isNullOrEmpty(fieldId) && schemaSymbol != null ) {
+      builder.append(cqlEscape(fieldId));
+      builder.append(seperator);
+      builder.append(schemaSymbol);
+    }
+    checkNotNull(builder);
+    return builder;
+  }
+
+  /*
+   can this be moved to streams-schemas if schemastore available in scope?
+   maybe an interface?
+   lot of boilerplate / reuse between plugins
+   however treatment is way different when resolving a type symbol vs resolving and listing fields .
+   */
+  private StringBuilder appendPropertiesNode(StringBuilder builder, Schema schema, ObjectNode propertiesNode, Character seperator) {
+    checkNotNull(builder);
+    checkNotNull(propertiesNode);
+    Iterator<Map.Entry<String, JsonNode>> fields = propertiesNode.fields();
+    Joiner joiner = Joiner.on("," + LS).skipNulls();
+    List<String> fieldStrings = Lists.newArrayList();
+    for ( ; fields.hasNext(); ) {
+      Map.Entry<String, JsonNode> field = fields.next();
+      String fieldId = field.getKey();
+      if ( !config.getExclusions().contains(fieldId) && field.getValue().isObject()) {
+        ObjectNode fieldNode = (ObjectNode) field.getValue();
+        FieldType fieldType = FieldUtil.determineFieldType(fieldNode);
+        if (fieldType != null ) {
+          switch (fieldType) {
             case ARRAY:
-                return "list";
+              ObjectNode itemsNode = (ObjectNode) fieldNode.get("items");
+              if ( currentDepth <= config.getMaxDepth()) {
+                StringBuilder arrayItemsBuilder = appendArrayItems(new StringBuilder(), schema, fieldId, itemsNode, seperator);
+                if ( !Strings.isNullOrEmpty(arrayItemsBuilder.toString())) {
+                  fieldStrings.add(arrayItemsBuilder.toString());
+                }
+              }
+              break;
+            case OBJECT:
+              Schema objectSchema = null;
+              URI parentUri = null;
+              if ( fieldNode.has("$ref") || fieldNode.has("extends") ) {
+                JsonNode refNode = fieldNode.get("$ref");
+                JsonNode extendsNode = fieldNode.get("extends");
+                if (refNode != null && refNode.isValueNode()) {
+                  parentUri = URI.create(refNode.asText());
+                } else if (extendsNode != null && extendsNode.isObject()) {
+                  parentUri = URI.create(extendsNode.get("$ref").asText());
+                }
+                URI absoluteUri;
+                if (parentUri.isAbsolute()) {
+                  absoluteUri = parentUri;
+                } else {
+                  absoluteUri = schema.getUri().resolve(parentUri);
+                  if (!absoluteUri.isAbsolute() || (absoluteUri.isAbsolute() && !schemaStore.getByUri(absoluteUri).isPresent() )) {
+                    absoluteUri = schema.getParentUri().resolve(parentUri);
+                  }
+                }
+                if (absoluteUri != null && absoluteUri.isAbsolute()) {
+                  Optional<Schema> schemaLookup = schemaStore.getByUri(absoluteUri);
+                  if (schemaLookup.isPresent()) {
+                    objectSchema = schemaLookup.get();
+                  }
+                }
+              }
+              //ObjectNode childProperties = schemaStore.resolveProperties(schema, fieldNode, fieldId);
+              if ( currentDepth < config.getMaxDepth()) {
+                StringBuilder structFieldBuilder = appendSchemaField(new StringBuilder(), objectSchema, fieldId, seperator);
+                if ( !Strings.isNullOrEmpty(structFieldBuilder.toString())) {
+                  fieldStrings.add(structFieldBuilder.toString());
+                }
+              }
+              break;
             default:
-                return fieldType.name().toUpperCase();
+              StringBuilder valueFieldBuilder = appendValueField(new StringBuilder(), schema, fieldId, fieldType, seperator);
+              if ( !Strings.isNullOrEmpty(valueFieldBuilder.toString())) {
+                fieldStrings.add(valueFieldBuilder.toString());
+              }
+          }
         }
+      }
+    }
+    builder.append(joiner.join(fieldStrings)).append(LS);
+    Preconditions.checkNotNull(builder);
+    return builder;
+  }
+
+  private static String cqlEscape( String fieldId ) {
+    return "`" + fieldId + "`";
+  }
+
+  private static String cqlType( FieldType fieldType ) {
+    switch ( fieldType ) {
+      case STRING:
+        return "text";
+      case INTEGER:
+        return "int";
+      case NUMBER:
+        return "double";
+      case OBJECT:
+        return "tuple";
+      case ARRAY:
+        return "list";
+      default:
+        return fieldType.name().toUpperCase();
     }
+  }
 
-    private String schemaSymbol( Schema schema ) {
-        if (schema == null) return null;
-        // this needs to return whatever
-        if (schema.getURI().getScheme().equals("file")) {
-            String inputFile = schema.getURI().getPath();
-            String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
-            for (String sourcePath : config.getSourcePaths()) {
-                resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
-            }
-            return dropExtension(resourcePath).replace("/", "_");
-        } else {
-            return "IDK";
-        }
+  private String schemaSymbol( Schema schema ) {
+    if (schema == null) {
+      return null;
+    }
+    // this needs to return whatever
+    if (schema.getUri().getScheme().equals("file")) {
+      String inputFile = schema.getUri().getPath();
+      String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
+      for (String sourcePath : config.getSourcePaths()) {
+        resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
+      }
+      return dropExtension(resourcePath).replace("/", "_");
+    } else {
+      return "IDK";
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraResourceGeneratorMojo.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraResourceGeneratorMojo.java b/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraResourceGeneratorMojo.java
index 038e744..3625f28 100644
--- a/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraResourceGeneratorMojo.java
+++ b/streams-plugins/streams-plugin-cassandra/src/main/java/org/apache/streams/plugins/cassandra/StreamsCassandraResourceGeneratorMojo.java
@@ -16,6 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.plugins.cassandra;
 
 import org.apache.maven.plugin.AbstractMojo;
@@ -33,48 +34,63 @@ import org.slf4j.LoggerFactory;
 import java.io.File;
 import java.util.List;
 
-@Mojo(  name = "generate-resources",
-        defaultPhase = LifecyclePhase.GENERATE_RESOURCES
-)
-@Execute(   goal = "generate-resources",
-            phase = LifecyclePhase.GENERATE_RESOURCES
-)
+@Mojo (
+    name = "generate-resources",
+    defaultPhase = LifecyclePhase.GENERATE_RESOURCES
+    )
+@Execute (
+    goal = "generate-resources",
+    phase = LifecyclePhase.GENERATE_RESOURCES
+    )
+/**
+ * Run within a module containing a src/main/jsonschema directory.
+ *
+ * <p/>
+ * mvn org.apache.streams.plugins:streams-plugin-cassandra:0.4-incubating:cassandra
+ *
+ */
 public class StreamsCassandraResourceGeneratorMojo extends AbstractMojo {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsCassandraResourceGeneratorMojo.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsCassandraResourceGeneratorMojo.class);
 
-    private volatile MojoFailureException mojoFailureException;
+  private volatile MojoFailureException mojoFailureException;
 
-    @Component
-    private MavenProject project;
+  @Component
+  private MavenProject project;
 
-    @Parameter( defaultValue = "${project.basedir}", readonly = true )
-    private File basedir;
+  @Parameter( defaultValue = "${project.basedir}", readonly = true )
+  private File basedir;
 
-    @Parameter( defaultValue = "src/main/jsonschema", readonly = true ) // Maven 3 only
-    public String sourceDirectory;
+  @Parameter( defaultValue = "src/main/jsonschema", readonly = true ) // Maven 3 only
+  public String sourceDirectory;
 
-    @Parameter( readonly = true ) // Maven 3 only
-    public List<String> sourcePaths;
+  @Parameter( readonly = true ) // Maven 3 only
+  public List<String> sourcePaths;
 
-    @Parameter(defaultValue = "target/generated-resources/cassandra", readonly = true)
-    public String targetDirectory;
+  @Parameter(defaultValue = "target/generated-resources/cassandra", readonly = true)
+  public String targetDirectory;
 
-    public void execute() throws MojoExecutionException, MojoFailureException {
+  /**
+   * execute StreamsCassandraResourceGenerator mojo.
+   * @throws MojoExecutionException MojoExecutionException
+   * @throws MojoFailureException MojoFailureException
+   */
+  public void execute() throws MojoExecutionException, MojoFailureException {
 
-        //addProjectDependenciesToClasspath();
+    //addProjectDependenciesToClasspath();
 
-        StreamsCassandraGenerationConfig config = new StreamsCassandraGenerationConfig();
+    StreamsCassandraGenerationConfig config = new StreamsCassandraGenerationConfig();
 
-        if( sourcePaths != null && sourcePaths.size() > 0)
-            config.setSourcePaths(sourcePaths);
-        else
-            config.setSourceDirectory(sourceDirectory);
-        config.setTargetDirectory(targetDirectory);
+    if ( sourcePaths != null && sourcePaths.size() > 0) {
+      config.setSourcePaths(sourcePaths);
+    } else {
+      config.setSourceDirectory(sourceDirectory);
+    }
+    config.setTargetDirectory(targetDirectory);
 
-        StreamsCassandraResourceGenerator streamsCassandraResourceGenerator = new StreamsCassandraResourceGenerator(config);
+    StreamsCassandraResourceGenerator streamsCassandraResourceGenerator = new StreamsCassandraResourceGenerator(config);
 
-        streamsCassandraResourceGenerator.run();
-    }
+    streamsCassandraResourceGenerator.run();
+  }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorCLITest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorCLITest.java b/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorCLITest.java
index cc288e5..63afc2d 100644
--- a/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorCLITest.java
+++ b/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorCLITest.java
@@ -16,12 +16,14 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.plugins.cassandra.test;
 
+import org.apache.streams.plugins.cassandra.StreamsCassandraResourceGenerator;
+
 import com.google.common.collect.Lists;
 import com.google.common.io.Files;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.streams.plugins.cassandra.StreamsCassandraResourceGenerator;
 import org.junit.Test;
 
 import java.io.File;
@@ -33,38 +35,38 @@ import java.util.List;
 import static org.apache.streams.plugins.cassandra.test.StreamsCassandraResourceGeneratorTest.cqlFilter;
 
 /**
- * Created by sblackmon on 5/5/16.
+ * Test that StreamsCassandraResourceGeneratorCLI generates resources.
  */
 public class StreamsCassandraResourceGeneratorCLITest {
 
-    @Test
-    public void testStreamsCassandraResourceGeneratorCLI() throws Exception {
+  @Test
+  public void testStreamsCassandraResourceGeneratorCLI() throws Exception {
 
-        String sourceDirectory = "target/test-classes/activitystreams-schemas";
-        String targetDirectory = "target/generated-resources/test-cli";
+    String sourceDirectory = "target/test-classes/activitystreams-schemas";
+    String targetDirectory = "target/generated-resources/test-cli";
 
-        List<String> argsList = Lists.newArrayList(sourceDirectory, targetDirectory);
-        StreamsCassandraResourceGenerator.main(argsList.toArray(new String[0]));
+    List<String> argsList = Lists.newArrayList(sourceDirectory, targetDirectory);
+    StreamsCassandraResourceGenerator.main(argsList.toArray(new String[0]));
 
-        File testOutput = new File( targetDirectory );
+    File testOutput = new File( targetDirectory );
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(cqlFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 1 );
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(cqlFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 1 );
 
-        Path path = Paths.get(testOutput.getAbsolutePath()).resolve("types.cql");
+    Path path = Paths.get(testOutput.getAbsolutePath()).resolve("types.cql");
 
-        assert( path.toFile().exists() );
+    assert ( path.toFile().exists() );
 
-        String typesCqlBytes = new String(
-                java.nio.file.Files.readAllBytes(path));
+    String typesCqlBytes = new String(
+        java.nio.file.Files.readAllBytes(path));
 
-        assert( StringUtils.countMatches(typesCqlBytes, "CREATE TYPE") == 133 );
+    assert ( StringUtils.countMatches(typesCqlBytes, "CREATE TYPE") == 133 );
 
-    }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorMojoIT.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorMojoIT.java b/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorMojoIT.java
index fc7765e..1eada8a 100644
--- a/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorMojoIT.java
+++ b/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorMojoIT.java
@@ -16,6 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.plugins.cassandra.test;
 
 import com.google.common.collect.Lists;
@@ -38,59 +39,58 @@ import java.util.List;
 import static org.apache.streams.plugins.cassandra.test.StreamsCassandraResourceGeneratorTest.cqlFilter;
 
 /**
- * Tests that streams-plugin-hive running via maven generates hql resources
+ * Tests that streams-plugin-cassandra running via maven generates cql resources.
  */
 public class StreamsCassandraResourceGeneratorMojoIT extends TestCase {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsCassandraResourceGeneratorMojoIT.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsCassandraResourceGeneratorMojoIT.class);
 
-    protected void setUp() throws Exception
-    {
-        // required for mojo lookups to work
-        super.setUp();
-    }
+  protected void setUp() throws Exception {
+    // required for mojo lookups to work
+    super.setUp();
+  }
 
-    @Test
-    public void testStreamsCassandraResourceGeneratorMojo() throws Exception {
+  @Test
+  public void testStreamsCassandraResourceGeneratorMojo() throws Exception {
 
-        File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-cassandra" );
+    File testDir = ResourceExtractor.simpleExtractResources( getClass(), "/streams-plugin-cassandra" );
 
-        Verifier verifier;
+    Verifier verifier;
 
-        verifier = new Verifier( testDir.getAbsolutePath() );
+    verifier = new Verifier( testDir.getAbsolutePath() );
 
-        List cliOptions = new ArrayList();
-        cliOptions.add( "-N" );
-        verifier.executeGoals( Lists.<String>newArrayList(
-                "clean",
-                "dependency:unpack-dependencies",
-                "generate-resources"));
+    List cliOptions = new ArrayList();
+    cliOptions.add( "-N" );
+    verifier.executeGoals( Lists.<String>newArrayList(
+        "clean",
+        "dependency:unpack-dependencies",
+        "generate-resources"));
 
-        verifier.verifyErrorFreeLog();
+    verifier.verifyErrorFreeLog();
 
-        verifier.resetStreams();
+    verifier.resetStreams();
 
-        Path testOutputPath = Paths.get(testDir.getAbsolutePath()).resolve("target/generated-resources/test-mojo");
+    Path testOutputPath = Paths.get(testDir.getAbsolutePath()).resolve("target/generated-resources/test-mojo");
 
-        File testOutput = testOutputPath.toFile();
+    File testOutput = testOutputPath.toFile();
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(cqlFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 1 );
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(cqlFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 1 );
 
-        Path path = testOutputPath.resolve("types.cql");
+    Path path = testOutputPath.resolve("types.cql");
 
-        assert( path.toFile().exists() );
+    assert ( path.toFile().exists() );
 
-        String typesCqlBytes = new String(
-                java.nio.file.Files.readAllBytes(path));
+    String typesCqlBytes = new String(
+        java.nio.file.Files.readAllBytes(path));
 
-        assert( StringUtils.countMatches(typesCqlBytes, "CREATE TYPE") == 133 );
+    assert ( StringUtils.countMatches(typesCqlBytes, "CREATE TYPE") == 133 );
 
-    }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorTest.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorTest.java b/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorTest.java
index 0ebdb2c..210831f 100644
--- a/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorTest.java
+++ b/streams-plugins/streams-plugin-cassandra/src/test/java/org/apache/streams/plugins/cassandra/test/StreamsCassandraResourceGeneratorTest.java
@@ -16,83 +16,87 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.plugins.cassandra.test;
 
+import org.apache.streams.plugins.cassandra.StreamsCassandraGenerationConfig;
+import org.apache.streams.plugins.cassandra.StreamsCassandraResourceGenerator;
+
 import com.google.common.base.Predicate;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.common.io.Files;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.streams.plugins.cassandra.StreamsCassandraGenerationConfig;
-import org.apache.streams.plugins.cassandra.StreamsCassandraResourceGenerator;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.annotation.Nullable;
 import java.io.File;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.Collection;
+import javax.annotation.Nullable;
 
 /**
  * Test that cassandra resources are generated.
  */
 public class StreamsCassandraResourceGeneratorTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsCassandraResourceGeneratorTest.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsCassandraResourceGeneratorTest.class);
 
-    public static final Predicate<File> cqlFilter = new Predicate<File>() {
-        @Override
-        public boolean apply(@Nullable File file) {
-            if( file.getName().endsWith(".cql") )
-                return true;
-            else return false;
-        }
-    };
+  public static final Predicate<File> cqlFilter = new Predicate<File>() {
+    @Override
+    public boolean apply(@Nullable File file) {
+      if ( file.getName().endsWith(".cql") ) {
+        return true;
+      } else {
+        return false;
+      }
+    }
+  };
 
-    /**
-     * Test that cassandra resources are generated
-     *
-     * @throws Exception
-     */
-    @Test
-    public void StreamsCassandraResourceGenerator() throws Exception {
+  /**
+   * Test that cassandra resources are generated.
+   *
+   * @throws Exception Exception
+   */
+  @Test
+  public void testStreamsCassandraResourceGenerator() throws Exception {
 
-        StreamsCassandraGenerationConfig config = new StreamsCassandraGenerationConfig();
+    StreamsCassandraGenerationConfig config = new StreamsCassandraGenerationConfig();
 
-        String sourceDirectory = "target/test-classes/activitystreams-schemas";
+    String sourceDirectory = "target/test-classes/activitystreams-schemas";
 
-        config.setSourceDirectory(sourceDirectory);
+    config.setSourceDirectory(sourceDirectory);
 
-        config.setTargetDirectory("target/generated-resources/cassandra");
+    config.setTargetDirectory("target/generated-resources/cassandra");
 
-        config.setExclusions(Sets.newHashSet("attachments"));
+    config.setExclusions(Sets.newHashSet("attachments"));
 
-        config.setMaxDepth(2);
+    config.setMaxDepth(2);
 
-        StreamsCassandraResourceGenerator streamsCassandraResourceGenerator = new StreamsCassandraResourceGenerator(config);
-        streamsCassandraResourceGenerator.run();
+    StreamsCassandraResourceGenerator streamsCassandraResourceGenerator = new StreamsCassandraResourceGenerator(config);
+    streamsCassandraResourceGenerator.run();
 
-        File testOutput = config.getTargetDirectory();
+    File testOutput = config.getTargetDirectory();
 
-        assert( testOutput != null );
-        assert( testOutput.exists() == true );
-        assert( testOutput.isDirectory() == true );
+    assert ( testOutput != null );
+    assert ( testOutput.exists() == true );
+    assert ( testOutput.isDirectory() == true );
 
-        Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
-                .filter(cqlFilter);
-        Collection<File> outputCollection = Lists.newArrayList(outputIterator);
-        assert( outputCollection.size() == 1 );
+    Iterable<File> outputIterator = Files.fileTreeTraverser().breadthFirstTraversal(testOutput)
+        .filter(cqlFilter);
+    Collection<File> outputCollection = Lists.newArrayList(outputIterator);
+    assert ( outputCollection.size() == 1 );
 
-        Path path = Paths.get(testOutput.getAbsolutePath()).resolve("types.cql");
+    Path path = Paths.get(testOutput.getAbsolutePath()).resolve("types.cql");
 
-        assert( path.toFile().exists() );
+    assert ( path.toFile().exists() );
 
-        String typesCqlBytes = new String(
-                java.nio.file.Files.readAllBytes(path));
+    String typesCqlBytes = new String(
+        java.nio.file.Files.readAllBytes(path));
 
-        assert( StringUtils.countMatches(typesCqlBytes, "CREATE TYPE") == 133 );
+    assert ( StringUtils.countMatches(typesCqlBytes, "CREATE TYPE") == 133 );
 
-    }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchGenerationConfig.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchGenerationConfig.java b/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchGenerationConfig.java
index 3e109a8..2a51a0c 100644
--- a/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchGenerationConfig.java
+++ b/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchGenerationConfig.java
@@ -16,9 +16,11 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.plugins.elasticsearch;
 
 import org.apache.streams.util.schema.GenerationConfig;
+
 import org.jsonschema2pojo.DefaultGenerationConfig;
 import org.jsonschema2pojo.util.URLUtil;
 
@@ -32,68 +34,71 @@ import java.util.List;
 import java.util.Set;
 
 /**
- * Configures StreamsElasticsearchResourceGenerator
- *
- *
+ * Configures StreamsElasticsearchResourceGenerator.
  */
 public class StreamsElasticsearchGenerationConfig extends DefaultGenerationConfig implements GenerationConfig {
 
-    public String getSourceDirectory() {
-        return sourceDirectory;
-    }
+  public String getSourceDirectory() {
+    return sourceDirectory;
+  }
 
-    public List<String> getSourcePaths() {
-        return sourcePaths;
-    }
+  public List<String> getSourcePaths() {
+    return sourcePaths;
+  }
 
-    private String sourceDirectory;
-    private List<String> sourcePaths = new ArrayList<String>();
-    private String targetDirectory;
-    private int maxDepth = 1;
+  private String sourceDirectory;
+  private List<String> sourcePaths = new ArrayList<String>();
+  private String targetDirectory;
+  private int maxDepth = 1;
 
-    public Set<String> getExclusions() {
-        return exclusions;
-    }
+  public Set<String> getExclusions() {
+    return exclusions;
+  }
 
-    public void setExclusions(Set<String> exclusions) {
-        this.exclusions = exclusions;
-    }
+  public void setExclusions(Set<String> exclusions) {
+    this.exclusions = exclusions;
+  }
 
-    private Set<String> exclusions = new HashSet<String>();
+  private Set<String> exclusions = new HashSet<String>();
 
-    public int getMaxDepth() {
-        return maxDepth;
-    }
+  public int getMaxDepth() {
+    return maxDepth;
+  }
 
-    public void setSourceDirectory(String sourceDirectory) {
-        this.sourceDirectory = sourceDirectory;
-    }
+  public void setSourceDirectory(String sourceDirectory) {
+    this.sourceDirectory = sourceDirectory;
+  }
 
-    public void setSourcePaths(List<String> sourcePaths) {
-        this.sourcePaths = sourcePaths;
-    }
+  public void setSourcePaths(List<String> sourcePaths) {
+    this.sourcePaths = sourcePaths;
+  }
 
-    public void setTargetDirectory(String targetDirectory) {
-        this.targetDirectory = targetDirectory;
-    }
+  public void setTargetDirectory(String targetDirectory) {
+    this.targetDirectory = targetDirectory;
+  }
 
-    public File getTargetDirectory() {
-        return new File(targetDirectory);
-    }
+  public File getTargetDirectory() {
+    return new File(targetDirectory);
+  }
 
-    public Iterator<URL> getSource() {
-        if (null != sourceDirectory) {
-            return Collections.singleton(URLUtil.parseURL(sourceDirectory)).iterator();
-        }
-        List<URL> sourceURLs = new ArrayList<URL>();
-        if( sourcePaths != null && sourcePaths.size() > 0)
-            for (String source : sourcePaths) {
-                sourceURLs.add(URLUtil.parseURL(source));
-            }
-        return sourceURLs.iterator();
+  /**
+   * get all sources.
+   * @return Iterator of URL
+   */
+  public Iterator<URL> getSource() {
+    if (null != sourceDirectory) {
+      return Collections.singleton(URLUtil.parseURL(sourceDirectory)).iterator();
     }
-
-    public void setMaxDepth(int maxDepth) {
-        this.maxDepth = maxDepth;
+    List<URL> sourceUrls = new ArrayList<URL>();
+    if ( sourcePaths != null && sourcePaths.size() > 0) {
+      for (String source : sourcePaths) {
+        sourceUrls.add(URLUtil.parseURL(source));
+      }
     }
+    return sourceUrls.iterator();
+  }
+
+  public void setMaxDepth(int maxDepth) {
+    this.maxDepth = maxDepth;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchResourceGenerator.java
----------------------------------------------------------------------
diff --git a/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchResourceGenerator.java b/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchResourceGenerator.java
index 96e2ecd..47db819 100644
--- a/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchResourceGenerator.java
+++ b/streams-plugins/streams-plugin-elasticsearch/src/main/java/org/apache/streams/plugins/elasticsearch/StreamsElasticsearchResourceGenerator.java
@@ -16,15 +16,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.plugins.elasticsearch;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.base.Joiner;
-import com.google.common.base.Optional;
-import com.google.common.base.Strings;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.util.schema.FieldType;
 import org.apache.streams.util.schema.FieldUtil;
@@ -32,6 +26,14 @@ import org.apache.streams.util.schema.GenerationConfig;
 import org.apache.streams.util.schema.Schema;
 import org.apache.streams.util.schema.SchemaStore;
 import org.apache.streams.util.schema.SchemaStoreImpl;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.base.Joiner;
+import com.google.common.base.Optional;
+import com.google.common.base.Strings;
 import org.jsonschema2pojo.util.URLUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -53,340 +55,370 @@ import static org.apache.streams.util.schema.FileUtil.writeFile;
 
 public class StreamsElasticsearchResourceGenerator implements Runnable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsElasticsearchResourceGenerator.class);
-
-    private ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
-
-    private final static String LS = System.getProperty("line.separator");
-
-    private StreamsElasticsearchGenerationConfig config;
+  private static final Logger LOGGER = LoggerFactory.getLogger(StreamsElasticsearchResourceGenerator.class);
 
-    private SchemaStore schemaStore = new SchemaStoreImpl();
+  private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
 
-    private int currentDepth = 0;
+  private static final String LS = System.getProperty("line.separator");
 
-    public static void main(String[] args) {
-        StreamsElasticsearchGenerationConfig config = new StreamsElasticsearchGenerationConfig();
+  private StreamsElasticsearchGenerationConfig config;
 
-        String sourceDirectory = "src/main/jsonschema";
-        String targetDirectory = "target/generated-resources/streams-plugin-elasticsearch";
+  private SchemaStore schemaStore = new SchemaStoreImpl();
 
-        if( args.length > 0 )
-            sourceDirectory = args[0];
-        if( args.length > 1 )
-            targetDirectory = args[1];
+  private int currentDepth = 0;
 
-        config.setSourceDirectory(sourceDirectory);
-        config.setTargetDirectory(targetDirectory);
+  /**
+   * Run from CLI without Maven
+   *
+   * <p/>
+   * java -jar streams-plugin-elasticsearch-jar-with-dependencies.jar StreamsElasticsearchResourceGenerator src/main/jsonschema target/generated-resources
+   *
+   * @param args [sourceDirectory, targetDirectory]
+   */
+  public static void main(String[] args) {
+    StreamsElasticsearchGenerationConfig config = new StreamsElasticsearchGenerationConfig();
 
-        StreamsElasticsearchResourceGenerator streamsElasticsearchResourceGenerator = new StreamsElasticsearchResourceGenerator(config);
-        streamsElasticsearchResourceGenerator.run();
+    String sourceDirectory = "src/main/jsonschema";
+    String targetDirectory = "target/generated-resources/streams-plugin-elasticsearch";
 
+    if ( args.length > 0 ) {
+      sourceDirectory = args[0];
     }
-
-    public StreamsElasticsearchResourceGenerator(StreamsElasticsearchGenerationConfig config) {
-        this.config = config;
+    if ( args.length > 1 ) {
+      targetDirectory = args[1];
     }
 
-    public void run() {
+    config.setSourceDirectory(sourceDirectory);
+    config.setTargetDirectory(targetDirectory);
 
-        Objects.requireNonNull(config);
+    StreamsElasticsearchResourceGenerator streamsElasticsearchResourceGenerator = new StreamsElasticsearchResourceGenerator(config);
+    streamsElasticsearchResourceGenerator.run();
 
-        generate(config);
+  }
 
-    }
+  public StreamsElasticsearchResourceGenerator(StreamsElasticsearchGenerationConfig config) {
+    this.config = config;
+  }
 
-    public void generate(StreamsElasticsearchGenerationConfig config) {
+  @Override
+  public void run() {
 
-        List<File> sourceFiles = new LinkedList<>();
+    Objects.requireNonNull(config);
 
-        for (Iterator<URL> sources = config.getSource(); sources.hasNext();) {
-            URL source = sources.next();
-            sourceFiles.add(URLUtil.getFileFromURL(source));
-        }
+    generate(config);
 
-        LOGGER.info("Seeded with {} source paths:", sourceFiles.size());
+  }
 
-        resolveRecursive((GenerationConfig)config, sourceFiles);
+  /**
+   * run generate using supplied StreamsElasticsearchGenerationConfig.
+   * @param config StreamsElasticsearchGenerationConfig
+   */
+  public void generate(StreamsElasticsearchGenerationConfig config) {
 
-        LOGGER.info("Resolved {} schema files:", sourceFiles.size());
+    List<File> sourceFiles = new LinkedList<>();
 
-        for (File item : sourceFiles) {
-            schemaStore.create(item.toURI());
-        }
-
-        LOGGER.info("Identified {} objects:", schemaStore.getSize());
+    for (Iterator<URL> sources = config.getSource(); sources.hasNext();) {
+      URL source = sources.next();
+      sourceFiles.add(URLUtil.getFileFromURL(source));
+    }
 
-        StringBuilder typesContent = new StringBuilder();
+    LOGGER.info("Seeded with {} source paths:", sourceFiles.size());
 
-        for (Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator(); schemaIterator.hasNext(); ) {
-            Schema schema = schemaIterator.next();
-            currentDepth = 0;
-            if( schema.getURI().getScheme().equals("file")) {
-                String inputFile = schema.getURI().getPath();
-                String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
-                for (String sourcePath : config.getSourcePaths()) {
-                    resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
-                }
-                String outputFile = config.getTargetDirectory() + "/" + resourcePath;
+    resolveRecursive((GenerationConfig)config, sourceFiles);
 
-                LOGGER.info("Processing {}:", resourcePath);
+    LOGGER.info("Resolved {} schema files:", sourceFiles.size());
 
-                String resourceId = schemaSymbol(schema);
+    for (File item : sourceFiles) {
+      schemaStore.create(item.toURI());
+    }
 
-                String resourceContent = generateResource(schema, resourceId);
+    LOGGER.info("Identified {} objects:", schemaStore.getSize());
 
-                if( !Strings.isNullOrEmpty(resourceContent))
-                    writeFile(outputFile, resourceContent);
+    StringBuilder typesContent = new StringBuilder();
 
-                LOGGER.info("Wrote {}:", outputFile);
-            }
+    for (Iterator<Schema> schemaIterator = schemaStore.getSchemaIterator(); schemaIterator.hasNext(); ) {
+      Schema schema = schemaIterator.next();
+      currentDepth = 0;
+      if ( schema.getUri().getScheme().equals("file")) {
+        String inputFile = schema.getUri().getPath();
+        String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
+        for (String sourcePath : config.getSourcePaths()) {
+          resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
         }
+        String outputFile = config.getTargetDirectory() + "/" + resourcePath;
 
-    }
-
-    public String generateResource(Schema schema, String resourceId) {
-        StringBuilder resourceBuilder = new StringBuilder();
+        LOGGER.info("Processing {}:", resourcePath);
 
-        ObjectNode rootNode = (ObjectNode) schema.getContent();
+        String resourceId = schemaSymbol(schema);
 
-        // remove java*
-        // remove description
-        // resolve all $ref
-        // replace format: date with type: date
-        // replace format: date-time with type: date
-        // replace array of primitive with just primitive
+        String resourceContent = generateResource(schema, resourceId);
 
-        try {
-            String objectString = MAPPER.writeValueAsString(rootNode);
-            resourceBuilder.append(objectString);
-        } catch (JsonProcessingException e) {
-            LOGGER.error("{}: {}", e.getClass().getName(), e);
+        if ( !Strings.isNullOrEmpty(resourceContent)) {
+          writeFile(outputFile, resourceContent);
         }
-        return resourceBuilder.toString();
-    }
 
-    public StringBuilder appendRootObject(StringBuilder builder, Schema schema, String resourceId, Character seperator) {
-        ObjectNode propertiesNode = schemaStore.resolveProperties(schema, null, resourceId);
-        if( propertiesNode.get("id") != null ) {
-            builder.append("id text PRIMARY KEY,");
-            builder.append(LS);
-            propertiesNode.remove("id");
-        }
-        if( propertiesNode.isObject() && propertiesNode.size() > 0) {
-            builder = appendPropertiesNode(builder, schema, propertiesNode, seperator);
-        }
-        return builder;
+        LOGGER.info("Wrote {}:", outputFile);
+      }
     }
 
-    private StringBuilder appendValueField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
-        // safe to append nothing
-        Objects.requireNonNull(builder);
-        builder.append(cqlEscape(fieldId));
-        builder.append(seperator);
-        builder.append(cqlType(fieldType));
-        return builder;
+  }
+
+  /**
+   * generateResource String from schema and resourceId.
+   * @param schema Schema
+   * @param resourceId String
+   * @return mapping
+   */
+  public String generateResource(Schema schema, String resourceId) {
+    StringBuilder resourceBuilder = new StringBuilder();
+
+    ObjectNode rootNode = (ObjectNode) schema.getContent();
+
+    // remove java*
+    // remove description
+    // resolve all $ref
+    // replace format: date with type: date
+    // replace format: date-time with type: date
+    // replace array of primitive with just primitive
+
+    try {
+      String objectString = MAPPER.writeValueAsString(rootNode);
+      resourceBuilder.append(objectString);
+    } catch (JsonProcessingException ex) {
+      LOGGER.error("{}: {}", ex.getClass().getName(), ex);
     }
-
-    public StringBuilder appendArrayItems(StringBuilder builder, Schema schema, String fieldId, ObjectNode itemsNode, Character seperator) {
-        // not safe to append nothing
-        Objects.requireNonNull(builder);
-        if( itemsNode == null ) return builder;
-        if( itemsNode.has("type")) {
-            try {
-                FieldType itemType = FieldUtil.determineFieldType(itemsNode);
-                switch( itemType ) {
-                    case OBJECT:
-                        Schema objectSchema = null;
-                        URI parentURI = null;
-                        if( itemsNode.has("$ref") || itemsNode.has("extends") ) {
-                            JsonNode refNode = itemsNode.get("$ref");
-                            JsonNode extendsNode = itemsNode.get("extends");
-                            if (refNode != null && refNode.isValueNode())
-                                parentURI = URI.create(refNode.asText());
-                            else if (extendsNode != null && extendsNode.isObject())
-                                parentURI = URI.create(extendsNode.get("$ref").asText());
-                            URI absoluteURI;
-                            if (parentURI.isAbsolute())
-                                absoluteURI = parentURI;
-                            else {
-                                absoluteURI = schema.getURI().resolve(parentURI);
-                                if (!absoluteURI.isAbsolute() || (absoluteURI.isAbsolute() && !schemaStore.getByUri(absoluteURI).isPresent() ))
-                                    absoluteURI = schema.getParentURI().resolve(parentURI);
-                            }
-                            if (absoluteURI.isAbsolute()) {
-                                Optional<Schema> schemaLookup = schemaStore.getByUri(absoluteURI);
-                                if (schemaLookup.isPresent()) {
-                                    objectSchema = schemaLookup.get();
-                                }
-                            }
-                        }
-                        // have to resolve schema here
-
-                        builder = appendArrayObject(builder, objectSchema, fieldId, seperator);
-                        break;
-                    case ARRAY:
-                        ObjectNode subArrayItems = (ObjectNode) itemsNode.get("items");
-                        builder = appendArrayItems(builder, schema, fieldId, subArrayItems, seperator);
-                        break;
-                    default:
-                        builder = appendArrayField(builder, schema, fieldId, itemType, seperator);
-                }
-            } catch (Exception e) {
-                LOGGER.warn("No item type resolvable for {}", fieldId);
-            }
-        }
-        Objects.requireNonNull(builder);
-        return builder;
+    return resourceBuilder.toString();
+  }
+
+  protected StringBuilder appendRootObject(StringBuilder builder, Schema schema, String resourceId, Character seperator) {
+    ObjectNode propertiesNode = schemaStore.resolveProperties(schema, null, resourceId);
+    if ( propertiesNode.get("id") != null ) {
+      builder.append("id text PRIMARY KEY,");
+      builder.append(LS);
+      propertiesNode.remove("id");
     }
-
-    private StringBuilder appendArrayField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
-        // safe to append nothing
-        Objects.requireNonNull(builder);
-        Objects.requireNonNull(fieldId);
-        builder.append(cqlEscape(fieldId));
-        builder.append(seperator);
-        builder.append("list<").append(cqlType(fieldType)).append(">");
-        Objects.requireNonNull(builder);
-        return builder;
+    if ( propertiesNode.isObject() && propertiesNode.size() > 0) {
+      builder = appendPropertiesNode(builder, schema, propertiesNode, seperator);
     }
-
-    private StringBuilder appendArrayObject(StringBuilder builder, Schema schema, String fieldId, Character seperator) {
-        // safe to append nothing
-        Objects.requireNonNull(builder);
-        String schemaSymbol = schemaSymbol(schema);
-        if( !Strings.isNullOrEmpty(fieldId) && schemaSymbol != null ) {
-            builder.append(cqlEscape(fieldId));
-            builder.append(seperator);
-            builder.append("list<").append(schemaSymbol).append(">");
-            builder.append(LS);
-        }
-        Objects.requireNonNull(builder);
-        return builder;
+    return builder;
+  }
+
+  private StringBuilder appendValueField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
+    // safe to append nothing
+    Objects.requireNonNull(builder);
+    builder.append(cqlEscape(fieldId));
+    builder.append(seperator);
+    builder.append(cqlType(fieldType));
+    return builder;
+  }
+
+  protected StringBuilder appendArrayItems(StringBuilder builder, Schema schema, String fieldId, ObjectNode itemsNode, Character seperator) {
+    // not safe to append nothing
+    Objects.requireNonNull(builder);
+    if ( itemsNode == null ) {
+      return builder;
     }
-
-    private StringBuilder appendSchemaField(StringBuilder builder, Schema schema, String fieldId, Character seperator) {
-        // safe to append nothing
-        Objects.requireNonNull(builder);
-        String schemaSymbol = schemaSymbol(schema);
-        if( !Strings.isNullOrEmpty(fieldId) && schemaSymbol != null ) {
-            builder.append(cqlEscape(fieldId));
-            builder.append(seperator);
-            builder.append(schemaSymbol);
-        }
-        Objects.requireNonNull(builder);
-        return builder;
-    }
-
-    /*
-     can this be moved to streams-schemas if schemastore available in scope?
-     maybe an interface?
-     lot of boilerplate / reuse between plugins
-     however treatment is way different when resolving a type symbol vs resolving and listing fields .
-     */
-    private StringBuilder appendPropertiesNode(StringBuilder builder, Schema schema, ObjectNode propertiesNode, Character seperator) {
-        Objects.requireNonNull(builder);
-        Objects.requireNonNull(propertiesNode);
-        Iterator<Map.Entry<String, JsonNode>> fields = propertiesNode.fields();
-        Joiner joiner = Joiner.on(","+LS).skipNulls();
-        List<String> fieldStrings = new ArrayList<>();
-        for( ; fields.hasNext(); ) {
-            Map.Entry<String, JsonNode> field = fields.next();
-            String fieldId = field.getKey();
-            if( !config.getExclusions().contains(fieldId) && field.getValue().isObject()) {
-                ObjectNode fieldNode = (ObjectNode) field.getValue();
-                FieldType fieldType = FieldUtil.determineFieldType(fieldNode);
-                if (fieldType != null ) {
-                    switch (fieldType) {
-                        case ARRAY:
-                            ObjectNode itemsNode = (ObjectNode) fieldNode.get("items");
-                            if( currentDepth <= config.getMaxDepth()) {
-                                StringBuilder arrayItemsBuilder = appendArrayItems(new StringBuilder(), schema, fieldId, itemsNode, seperator);
-                                if( !Strings.isNullOrEmpty(arrayItemsBuilder.toString())) {
-                                    fieldStrings.add(arrayItemsBuilder.toString());
-                                }
-                            }
-                            break;
-                        case OBJECT:
-                            Schema objectSchema = null;
-                            URI parentURI = null;
-                            if( fieldNode.has("$ref") || fieldNode.has("extends") ) {
-                                JsonNode refNode = fieldNode.get("$ref");
-                                JsonNode extendsNode = fieldNode.get("extends");
-                                if (refNode != null && refNode.isValueNode())
-                                    parentURI = URI.create(refNode.asText());
-                                else if (extendsNode != null && extendsNode.isObject())
-                                    parentURI = URI.create(extendsNode.get("$ref").asText());
-                                URI absoluteURI;
-                                if (parentURI.isAbsolute())
-                                    absoluteURI = parentURI;
-                                else {
-                                    absoluteURI = schema.getURI().resolve(parentURI);
-                                    if (!absoluteURI.isAbsolute() || (absoluteURI.isAbsolute() && !schemaStore.getByUri(absoluteURI).isPresent() ))
-                                        absoluteURI = schema.getParentURI().resolve(parentURI);
-                                }
-                                if (absoluteURI.isAbsolute()) {
-                                    Optional<Schema> schemaLookup = schemaStore.getByUri(absoluteURI);
-                                    if (schemaLookup.isPresent()) {
-                                        objectSchema = schemaLookup.get();
-                                    }
-                                }
-                            }
-                            //ObjectNode childProperties = schemaStore.resolveProperties(schema, fieldNode, fieldId);
-                            if( currentDepth < config.getMaxDepth()) {
-                                StringBuilder structFieldBuilder = appendSchemaField(new StringBuilder(), objectSchema, fieldId, seperator);
-                                if( !Strings.isNullOrEmpty(structFieldBuilder.toString())) {
-                                    fieldStrings.add(structFieldBuilder.toString());
-                                }
-                            }
-                            break;
-                        default:
-                            StringBuilder valueFieldBuilder = appendValueField(new StringBuilder(), schema, fieldId, fieldType, seperator);
-                            if( !Strings.isNullOrEmpty(valueFieldBuilder.toString())) {
-                                fieldStrings.add(valueFieldBuilder.toString());
-                            }
-                    }
+    if ( itemsNode.has("type")) {
+      try {
+        FieldType itemType = FieldUtil.determineFieldType(itemsNode);
+        switch ( itemType ) {
+          case OBJECT:
+            Schema objectSchema = null;
+            URI parentUri = null;
+            if ( itemsNode.has("$ref") || itemsNode.has("extends") ) {
+              JsonNode refNode = itemsNode.get("$ref");
+              JsonNode extendsNode = itemsNode.get("extends");
+              if (refNode != null && refNode.isValueNode()) {
+                parentUri = URI.create(refNode.asText());
+              } else if (extendsNode != null && extendsNode.isObject()) {
+                parentUri = URI.create(extendsNode.get("$ref").asText());
+              }
+              URI absoluteUri;
+              if (parentUri.isAbsolute()) {
+                absoluteUri = parentUri;
+              } else {
+                absoluteUri = schema.getUri().resolve(parentUri);
+                if (!absoluteUri.isAbsolute() || (absoluteUri.isAbsolute() && !schemaStore.getByUri(absoluteUri).isPresent() )) {
+                  absoluteUri = schema.getParentUri().resolve(parentUri);
+                }
+              }
+              if (absoluteUri.isAbsolute()) {
+                Optional<Schema> schemaLookup = schemaStore.getByUri(absoluteUri);
+                if (schemaLookup.isPresent()) {
+                  objectSchema = schemaLookup.get();
                 }
+              }
             }
+            // have to resolve schema here
+
+            builder = appendArrayObject(builder, objectSchema, fieldId, seperator);
+            break;
+          case ARRAY:
+            ObjectNode subArrayItems = (ObjectNode) itemsNode.get("items");
+            builder = appendArrayItems(builder, schema, fieldId, subArrayItems, seperator);
+            break;
+          default:
+            builder = appendArrayField(builder, schema, fieldId, itemType, seperator);
         }
-        builder.append(joiner.join(fieldStrings)).append(LS);
-        Objects.requireNonNull(builder);
-        return builder;
+      } catch (Exception ex) {
+        LOGGER.warn("No item type resolvable for {}", fieldId);
+      }
     }
-
-    private static String cqlEscape( String fieldId ) {
-        return "`"+fieldId+"`";
+    Objects.requireNonNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendArrayField(StringBuilder builder, Schema schema, String fieldId, FieldType fieldType, Character seperator) {
+    // safe to append nothing
+    Objects.requireNonNull(builder);
+    Objects.requireNonNull(fieldId);
+    builder.append(cqlEscape(fieldId));
+    builder.append(seperator);
+    builder.append("list<").append(cqlType(fieldType)).append(">");
+    Objects.requireNonNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendArrayObject(StringBuilder builder, Schema schema, String fieldId, Character seperator) {
+    // safe to append nothing
+    Objects.requireNonNull(builder);
+    String schemaSymbol = schemaSymbol(schema);
+    if ( !Strings.isNullOrEmpty(fieldId) && schemaSymbol != null ) {
+      builder.append(cqlEscape(fieldId));
+      builder.append(seperator);
+      builder.append("list<").append(schemaSymbol).append(">");
+      builder.append(LS);
     }
-
-    private static String cqlType( FieldType fieldType ) {
-        switch( fieldType ) {
-            case STRING:
-                return "text";
-            case INTEGER:
-                return "int";
-            case NUMBER:
-                return "double";
-            case OBJECT:
-                return "tuple";
+    Objects.requireNonNull(builder);
+    return builder;
+  }
+
+  private StringBuilder appendSchemaField(StringBuilder builder, Schema schema, String fieldId, Character seperator) {
+    // safe to append nothing
+    Objects.requireNonNull(builder);
+    String schemaSymbol = schemaSymbol(schema);
+    if ( !Strings.isNullOrEmpty(fieldId) && schemaSymbol != null ) {
+      builder.append(cqlEscape(fieldId));
+      builder.append(seperator);
+      builder.append(schemaSymbol);
+    }
+    Objects.requireNonNull(builder);
+    return builder;
+  }
+
+  /*
+   can this be moved to streams-schemas if schemastore available in scope?
+   maybe an interface?
+   lot of boilerplate / reuse between plugins
+   however treatment is way different when resolving a type symbol vs resolving and listing fields .
+   */
+  private StringBuilder appendPropertiesNode(StringBuilder builder, Schema schema, ObjectNode propertiesNode, Character seperator) {
+    Objects.requireNonNull(builder);
+    Objects.requireNonNull(propertiesNode);
+    Iterator<Map.Entry<String, JsonNode>> fields = propertiesNode.fields();
+    Joiner joiner = Joiner.on("," + LS).skipNulls();
+    List<String> fieldStrings = new ArrayList<>();
+    for ( ; fields.hasNext(); ) {
+      Map.Entry<String, JsonNode> field = fields.next();
+      String fieldId = field.getKey();
+      if ( !config.getExclusions().contains(fieldId) && field.getValue().isObject()) {
+        ObjectNode fieldNode = (ObjectNode) field.getValue();
+        FieldType fieldType = FieldUtil.determineFieldType(fieldNode);
+        if (fieldType != null ) {
+          switch (fieldType) {
             case ARRAY:
-                return "list";
+              ObjectNode itemsNode = (ObjectNode) fieldNode.get("items");
+              if ( currentDepth <= config.getMaxDepth()) {
+                StringBuilder arrayItemsBuilder = appendArrayItems(new StringBuilder(), schema, fieldId, itemsNode, seperator);
+                if ( !Strings.isNullOrEmpty(arrayItemsBuilder.toString())) {
+                  fieldStrings.add(arrayItemsBuilder.toString());
+                }
+              }
+              break;
+            case OBJECT:
+              Schema objectSchema = null;
+              URI parentUri = null;
+              if ( fieldNode.has("$ref") || fieldNode.has("extends") ) {
+                JsonNode refNode = fieldNode.get("$ref");
+                JsonNode extendsNode = fieldNode.get("extends");
+                if (refNode != null && refNode.isValueNode()) {
+                  parentUri = URI.create(refNode.asText());
+                } else if (extendsNode != null && extendsNode.isObject()) {
+                  parentUri = URI.create(extendsNode.get("$ref").asText());
+                }
+                URI absoluteUri;
+                if (parentUri.isAbsolute()) {
+                  absoluteUri = parentUri;
+                } else {
+                  absoluteUri = schema.getUri().resolve(parentUri);
+                  if (!absoluteUri.isAbsolute() || (absoluteUri.isAbsolute() && !schemaStore.getByUri(absoluteUri).isPresent() )) {
+                    absoluteUri = schema.getParentUri().resolve(parentUri);
+                  }
+                }
+                if (absoluteUri.isAbsolute()) {
+                  Optional<Schema> schemaLookup = schemaStore.getByUri(absoluteUri);
+                  if (schemaLookup.isPresent()) {
+                    objectSchema = schemaLookup.get();
+                  }
+                }
+              }
+              //ObjectNode childProperties = schemaStore.resolveProperties(schema, fieldNode, fieldId);
+              if ( currentDepth < config.getMaxDepth()) {
+                StringBuilder structFieldBuilder = appendSchemaField(new StringBuilder(), objectSchema, fieldId, seperator);
+                if ( !Strings.isNullOrEmpty(structFieldBuilder.toString())) {
+                  fieldStrings.add(structFieldBuilder.toString());
+                }
+              }
+              break;
             default:
-                return fieldType.name().toUpperCase();
+              StringBuilder valueFieldBuilder = appendValueField(new StringBuilder(), schema, fieldId, fieldType, seperator);
+              if ( !Strings.isNullOrEmpty(valueFieldBuilder.toString())) {
+                fieldStrings.add(valueFieldBuilder.toString());
+              }
+          }
         }
+      }
+    }
+    builder.append(joiner.join(fieldStrings)).append(LS);
+    Objects.requireNonNull(builder);
+    return builder;
+  }
+
+  private static String cqlEscape( String fieldId ) {
+    return "`" + fieldId + "`";
+  }
+
+  private static String cqlType( FieldType fieldType ) {
+    switch ( fieldType ) {
+      case STRING:
+        return "text";
+      case INTEGER:
+        return "int";
+      case NUMBER:
+        return "double";
+      case OBJECT:
+        return "tuple";
+      case ARRAY:
+        return "list";
+      default:
+        return fieldType.name().toUpperCase();
     }
+  }
 
-    private String schemaSymbol( Schema schema ) {
-        if (schema == null) return null;
-        // this needs to return whatever
-        if (schema.getURI().getScheme().equals("file")) {
-            String inputFile = schema.getURI().getPath();
-            String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
-            for (String sourcePath : config.getSourcePaths()) {
-                resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
-            }
-            return dropExtension(resourcePath).replace("/", "_");
-        } else {
-            return "IDK";
-        }
+  private String schemaSymbol( Schema schema ) {
+    if (schema == null) {
+      return null;
+    }
+    // this needs to return whatever
+    if (schema.getUri().getScheme().equals("file")) {
+      String inputFile = schema.getUri().getPath();
+      String resourcePath = dropSourcePathPrefix(inputFile, config.getSourceDirectory());
+      for (String sourcePath : config.getSourcePaths()) {
+        resourcePath = dropSourcePathPrefix(resourcePath, sourcePath);
+      }
+      return dropExtension(resourcePath).replace("/", "_");
+    } else {
+      return "IDK";
     }
+  }
 }


[31/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookProvider.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookProvider.java
index e907082..be59bd7 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookProvider.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookProvider.java
@@ -15,16 +15,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.facebook.provider;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Queues;
-import com.google.common.collect.Sets;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.typesafe.config.ConfigRenderOptions;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProvider;
@@ -34,6 +27,16 @@ import org.apache.streams.facebook.IdConfig;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.util.ComponentUtils;
 import org.apache.streams.util.SerializationUtil;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Queues;
+import com.google.common.collect.Sets;
+import com.google.common.util.concurrent.Futures;
+import com.google.common.util.concurrent.ListenableFuture;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
+import com.typesafe.config.ConfigRenderOptions;
+
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -44,7 +47,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.*;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.Executors;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 /**
@@ -52,103 +56,109 @@ import java.util.concurrent.atomic.AtomicBoolean;
  */
 public abstract class FacebookProvider implements StreamsProvider {
 
-    private final static String STREAMS_ID = "FacebookProvider";
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(FacebookProvider.class);
-    private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
-    private static final int MAX_BATCH_SIZE = 2000;
-
-    protected FacebookConfiguration configuration;
-    protected BlockingQueue<StreamsDatum> datums;
-
-    private AtomicBoolean isComplete;
-    private ListeningExecutorService executor;
-    List<ListenableFuture<Object>> futures = new ArrayList<>();
-
-    private FacebookDataCollector dataCollector;
-
-    public FacebookProvider() {
-        try {
-            this.configuration = MAPPER.readValue(StreamsConfigurator.config.getConfig("facebook").root().render(ConfigRenderOptions.concise()), FacebookConfiguration.class);
-        } catch (IOException ioe) {
-            LOGGER.error("Exception trying to read default config : {}", ioe);
-        }
-    }
-
-    public FacebookProvider(FacebookConfiguration configuration) {
-        this.configuration = (FacebookConfiguration) SerializationUtil.cloneBySerialization(configuration);
-    }
-
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  private static final String STREAMS_ID = "FacebookProvider";
 
-    @Override
-    public void startStream() {
-        ListenableFuture future = executor.submit(getDataCollector());
-        futures.add(future);
-        executor.shutdown();
-    }
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookProvider.class);
+  private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+  private static final int MAX_BATCH_SIZE = 2000;
 
-    protected abstract FacebookDataCollector getDataCollector();
-
-    @Override
-    public StreamsResultSet readCurrent() {
-        int batchSize = 0;
-        BlockingQueue<StreamsDatum> batch = Queues.newLinkedBlockingQueue();
-        while(!this.datums.isEmpty() && batchSize < MAX_BATCH_SIZE) {
-            ComponentUtils.offerUntilSuccess(ComponentUtils.pollWhileNotEmpty(this.datums), batch);
-            ++batchSize;
-        }
-        return new StreamsResultSet(batch);
-    }
+  protected FacebookConfiguration configuration;
+  protected BlockingQueue<StreamsDatum> datums;
 
-    @Override
-    public StreamsResultSet readNew(BigInteger sequence) {
-        return null;
-    }
+  private AtomicBoolean isComplete;
+  private ListeningExecutorService executor;
+  List<ListenableFuture<Object>> futures = new ArrayList<>();
 
-    @Override
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        return null;
-    }
+  private FacebookDataCollector dataCollector;
 
-    @Override
-    public void prepare(Object configurationObject) {
-        this.datums = Queues.newLinkedBlockingQueue();
-        this.isComplete = new AtomicBoolean(false);
-        this.executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(1));
+  /**
+   * FacebookProvider constructor - resolves FacebookConfiguration from JVM 'facebook'.
+   */
+  public FacebookProvider() {
+    try {
+      this.configuration = MAPPER.readValue(StreamsConfigurator.config.getConfig("facebook").root().render(ConfigRenderOptions.concise()), FacebookConfiguration.class);
+    } catch (IOException ioe) {
+      LOGGER.error("Exception trying to read default config : {}", ioe);
     }
-
-    @Override
-    public void cleanUp() {
-        ComponentUtils.shutdownExecutor(executor, 5, 5);
-        executor = null;
+  }
+
+  /**
+   * FacebookProvider constructor - uses supplied FacebookConfiguration.
+   */
+  public FacebookProvider(FacebookConfiguration configuration) {
+    this.configuration = (FacebookConfiguration) SerializationUtil.cloneBySerialization(configuration);
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void startStream() {
+    ListenableFuture future = executor.submit(getDataCollector());
+    futures.add(future);
+    executor.shutdown();
+  }
+
+  protected abstract FacebookDataCollector getDataCollector();
+
+  @Override
+  public StreamsResultSet readCurrent() {
+    int batchSize = 0;
+    BlockingQueue<StreamsDatum> batch = Queues.newLinkedBlockingQueue();
+    while (!this.datums.isEmpty() && batchSize < MAX_BATCH_SIZE) {
+      ComponentUtils.offerUntilSuccess(ComponentUtils.pollWhileNotEmpty(this.datums), batch);
+      ++batchSize;
     }
-
-    /**
-     * Overrides the ids and addedAfter time in the configuration
-     * @param idsToAfterDate
-     */
-    public void overrideIds(Map<String, DateTime> idsToAfterDate) {
-        Set<IdConfig> ids = Sets.newHashSet();
-        for(String id : idsToAfterDate.keySet()) {
-            IdConfig idConfig = new IdConfig();
-            idConfig.setId(id);
-            idConfig.setAfterDate(idsToAfterDate.get(id));
-            ids.add(idConfig);
-        }
-        this.configuration.setIds(ids);
+    return new StreamsResultSet(batch);
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    return null;
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    return null;
+  }
+
+  @Override
+  public void prepare(Object configurationObject) {
+    this.datums = Queues.newLinkedBlockingQueue();
+    this.isComplete = new AtomicBoolean(false);
+    this.executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(1));
+  }
+
+  @Override
+  public void cleanUp() {
+    ComponentUtils.shutdownExecutor(executor, 5, 5);
+    executor = null;
+  }
+
+  /**
+   * Overrides the ids and addedAfter time in the configuration.
+   * @param idsToAfterDate idsToAfterDate
+   */
+  public void overrideIds(Map<String, DateTime> idsToAfterDate) {
+    Set<IdConfig> ids = Sets.newHashSet();
+    for (String id : idsToAfterDate.keySet()) {
+      IdConfig idConfig = new IdConfig();
+      idConfig.setId(id);
+      idConfig.setAfterDate(idsToAfterDate.get(id));
+      ids.add(idConfig);
     }
-
-    @Override
-    public boolean isRunning() {
-        if (datums.isEmpty() && executor.isTerminated() && Futures.allAsList(futures).isDone()) {
-            LOGGER.info("Completed");
-            isComplete.set(true);
-            LOGGER.info("Exiting");
-        }
-        return !isComplete.get();
+    this.configuration.setIds(ids);
+  }
+
+  @Override
+  public boolean isRunning() {
+    if (datums.isEmpty() && executor.isTerminated() && Futures.allAsList(futures).isDone()) {
+      LOGGER.info("Completed");
+      isComplete.set(true);
+      LOGGER.info("Exiting");
     }
+    return !isComplete.get();
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookUserInformationProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookUserInformationProvider.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookUserInformationProvider.java
index 1262106..3939f23 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookUserInformationProvider.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookUserInformationProvider.java
@@ -18,24 +18,24 @@
 
 package org.apache.streams.facebook.provider;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigRenderOptions;
-import facebook4j.*;
-import facebook4j.conf.ConfigurationBuilder;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProvider;
 import org.apache.streams.core.StreamsResultSet;
 import org.apache.streams.facebook.FacebookUserInformationConfiguration;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.MoreExecutors;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigRenderOptions;
+
+import org.apache.commons.lang.NotImplementedException;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.NotImplementedException;
 
 import java.io.IOException;
 import java.io.Serializable;
@@ -44,262 +44,291 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Queue;
-import java.util.concurrent.*;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-public class FacebookUserInformationProvider implements StreamsProvider, Serializable
-{
+import facebook4j.Facebook;
+import facebook4j.FacebookException;
+import facebook4j.FacebookFactory;
+import facebook4j.Friend;
+import facebook4j.Paging;
+import facebook4j.ResponseList;
+import facebook4j.User;
+import facebook4j.conf.ConfigurationBuilder;
 
-    public static final String STREAMS_ID = "FacebookUserInformationProvider";
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(FacebookUserInformationProvider.class);
+public class FacebookUserInformationProvider implements StreamsProvider, Serializable {
 
-    private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  public static final String STREAMS_ID = "FacebookUserInformationProvider";
 
-    private static final String ALL_PERMISSIONS = "ads_management,ads_read,create_event,create_note,email,export_stream,friends_about_me,friends_actions.books,friends_actions.music,friends_actions.news,friends_actions.video,friends_activities,friends_birthday,friends_education_history,friends_events,friends_games_activity,friends_groups,friends_hometown,friends_interests,friends_likes,friends_location,friends_notes,friends_online_presence,friends_photo_video_tags,friends_photos,friends_questions,friends_relationship_details,friends_relationships,friends_religion_politics,friends_status,friends_subscriptions,friends_videos,friends_website,friends_work_history,manage_friendlists,manage_notifications,manage_pages,photo_upload,publish_actions,publish_stream,read_friendlists,read_insights,read_mailbox,read_page_mailboxes,read_requests,read_stream,rsvp_event,share_item,sms,status_update,user_about_me,user_actions.books,user_actions.music,user_actions.news,user_actions.video,user_activitie
 s,user_birthday,user_education_history,user_events,user_friends,user_games_activity,user_groups,user_hometown,user_interests,user_likes,user_location,user_notes,user_online_presence,user_photo_video_tags,user_photos,user_questions,user_relationship_details,user_relationships,user_religion_politics,user_status,user_subscriptions,user_videos,user_website,user_work_history,video_upload,xmpp_login";
-    private FacebookUserInformationConfiguration facebookUserInformationConfiguration;
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookUserInformationProvider.class);
 
-    private Class klass;
-    protected volatile Queue<StreamsDatum> providerQueue = new LinkedBlockingQueue<StreamsDatum>();
+  private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    public FacebookUserInformationConfiguration getConfig()              { return facebookUserInformationConfiguration; }
+  private static final String ALL_PERMISSIONS = "ads_management,ads_read,create_event,create_note,email,export_stream,friends_about_me,friends_actions.books,friends_actions.music,friends_actions.news,friends_actions.video,friends_activities,friends_birthday,friends_education_history,friends_events,friends_games_activity,friends_groups,friends_hometown,friends_interests,friends_likes,friends_location,friends_notes,friends_online_presence,friends_photo_video_tags,friends_photos,friends_questions,friends_relationship_details,friends_relationships,friends_religion_politics,friends_status,friends_subscriptions,friends_videos,friends_website,friends_work_history,manage_friendlists,manage_notifications,manage_pages,photo_upload,publish_actions,publish_stream,read_friendlists,read_insights,read_mailbox,read_page_mailboxes,read_requests,read_stream,rsvp_event,share_item,sms,status_update,user_about_me,user_actions.books,user_actions.music,user_actions.news,user_actions.video,user_activities,
 user_birthday,user_education_history,user_events,user_friends,user_games_activity,user_groups,user_hometown,user_interests,user_likes,user_location,user_notes,user_online_presence,user_photo_video_tags,user_photos,user_questions,user_relationship_details,user_relationships,user_religion_politics,user_status,user_subscriptions,user_videos,user_website,user_work_history,video_upload,xmpp_login";
+  private FacebookUserInformationConfiguration facebookUserInformationConfiguration;
 
-    public void setConfig(FacebookUserInformationConfiguration config)   { this.facebookUserInformationConfiguration = config; }
+  private Class klass;
+  protected volatile Queue<StreamsDatum> providerQueue = new LinkedBlockingQueue<StreamsDatum>();
 
-    protected Iterator<String[]> idsBatches;
+  public FacebookUserInformationConfiguration getConfig() {
+    return facebookUserInformationConfiguration;
+  }
 
-    protected ExecutorService executor;
+  public void setConfig(FacebookUserInformationConfiguration config) {
+    this.facebookUserInformationConfiguration = config;
+  }
 
-    protected DateTime start;
-    protected DateTime end;
+  protected Iterator<String[]> idsBatches;
 
-    protected final AtomicBoolean running = new AtomicBoolean();
+  protected ExecutorService executor;
 
-    private static ExecutorService newFixedThreadPoolWithQueueSize(int nThreads, int queueSize) {
-        return new ThreadPoolExecutor(nThreads, nThreads,
-                5000L, TimeUnit.MILLISECONDS,
-                new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
-    }
+  protected DateTime start;
+  protected DateTime end;
 
-    public FacebookUserInformationProvider() {
-        Config config = StreamsConfigurator.config.getConfig("facebook");
-        FacebookUserInformationConfiguration facebookUserInformationConfiguration;
-        try {
-            facebookUserInformationConfiguration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
-        } catch (IOException e) {
-            e.printStackTrace();
-            return;
-        }
-    }
+  protected final AtomicBoolean running = new AtomicBoolean();
 
-    public FacebookUserInformationProvider(FacebookUserInformationConfiguration config) {
-        this.facebookUserInformationConfiguration = config;
-    }
+  private static ExecutorService newFixedThreadPoolWithQueueSize(int numThreads, int queueSize) {
+    return new ThreadPoolExecutor(numThreads, numThreads,
+        5000L, TimeUnit.MILLISECONDS,
+        new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
+  }
 
-    public FacebookUserInformationProvider(Class klass) {
-        Config config = StreamsConfigurator.config.getConfig("facebook");
-        FacebookUserInformationConfiguration facebookUserInformationConfiguration;
-        try {
-            facebookUserInformationConfiguration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
-        } catch (IOException e) {
-            e.printStackTrace();
-            return;
-        }
-        this.klass = klass;
+  /**
+   * FacebookUserInformationProvider constructor - resolves FacebookUserInformationConfiguration from JVM 'facebook'.
+   */
+  public FacebookUserInformationProvider() {
+    Config config = StreamsConfigurator.config.getConfig("facebook");
+    FacebookUserInformationConfiguration facebookUserInformationConfiguration;
+    try {
+      facebookUserInformationConfiguration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
+    } catch (IOException ex) {
+      ex.printStackTrace();
+      return;
     }
-
-    public FacebookUserInformationProvider(FacebookUserInformationConfiguration config, Class klass) {
-        this.facebookUserInformationConfiguration = config;
-        this.klass = klass;
+  }
+
+  /**
+   * FacebookUserInformationProvider constructor - uses supplied FacebookUserInformationConfiguration.
+   * @param config the FacebookUserInformationConfiguration to use
+   */
+  public FacebookUserInformationProvider(FacebookUserInformationConfiguration config) {
+    this.facebookUserInformationConfiguration = config;
+  }
+
+  public FacebookUserInformationProvider(Class klass) {
+    Config config = StreamsConfigurator.config.getConfig("facebook");
+    FacebookUserInformationConfiguration facebookUserInformationConfiguration;
+    try {
+      facebookUserInformationConfiguration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
+    } catch (IOException ex) {
+      ex.printStackTrace();
+      return;
     }
-
-    public Queue<StreamsDatum> getProviderQueue() {
-        return this.providerQueue;
+    this.klass = klass;
+  }
+
+  public FacebookUserInformationProvider(FacebookUserInformationConfiguration config, Class klass) {
+    this.facebookUserInformationConfiguration = config;
+    this.klass = klass;
+  }
+
+  public Queue<StreamsDatum> getProviderQueue() {
+    return this.providerQueue;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void startStream() {
+    running.set(true);
+  }
+
+  @Override
+  public StreamsResultSet readCurrent() {
+
+    Preconditions.checkArgument(idsBatches.hasNext());
+
+    LOGGER.info("readCurrent");
+
+    Facebook client = getFacebookClient();
+
+    try {
+      User me = client.users().getMe();
+      String json = mapper.writeValueAsString(me);
+      providerQueue.add(
+          new StreamsDatum(json, DateTime.now())
+      );
+    } catch (JsonProcessingException ex) {
+      ex.printStackTrace();
+    } catch (FacebookException ex) {
+      ex.printStackTrace();
     }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
-
-    @Override
-    public void startStream() {
-        running.set(true);
-    }
-
-    public StreamsResultSet readCurrent() {
-
-        Preconditions.checkArgument(idsBatches.hasNext());
-
-        LOGGER.info("readCurrent");
-
-        Facebook client = getFacebookClient();
-
+    if (idsBatches.hasNext()) {
+      while (idsBatches.hasNext()) {
         try {
-            User me = client.users().getMe();
-            String json = mapper.writeValueAsString(me);
-            providerQueue.add(
-                new StreamsDatum(json, DateTime.now())
-            );
-        } catch (JsonProcessingException e) {
-            e.printStackTrace();
-        } catch (FacebookException e) {
-            e.printStackTrace();
-        }
+          List<User> userList = client.users().getUsers(idsBatches.next());
+          for (User user : userList) {
 
-        if( idsBatches.hasNext()) {
-            while (idsBatches.hasNext()) {
-                try {
-                    List<User> userList = client.users().getUsers(idsBatches.next());
-                    for (User user : userList) {
-
-                        try {
-                            String json = mapper.writeValueAsString(user);
-                            providerQueue.add(
-                                    new StreamsDatum(json, DateTime.now())
-                            );
-                        } catch (JsonProcessingException e) {
-                            //                        e.printStackTrace();
-                        }
-                    }
-
-                } catch (FacebookException e) {
-                    e.printStackTrace();
-                }
-            }
-        } else {
             try {
-                ResponseList<Friend> friendResponseList = client.friends().getFriends();
-                Paging<Friend> friendPaging;
-                do {
-
-                    for( Friend friend : friendResponseList ) {
-
-                        String json;
-                        try {
-                            json = mapper.writeValueAsString(friend);
-                            providerQueue.add(
-                                    new StreamsDatum(json)
-                            );
-                        } catch (JsonProcessingException e) {
-//                        e.printStackTrace();
-                        }
-                    }
-                    friendPaging = friendResponseList.getPaging();
-                    friendResponseList = client.fetchNext(friendPaging);
-                } while( friendPaging != null &&
-                        friendResponseList != null );
-            } catch (FacebookException e) {
-                e.printStackTrace();
+              String json = mapper.writeValueAsString(user);
+              providerQueue.add(
+                  new StreamsDatum(json, DateTime.now())
+              );
+            } catch (JsonProcessingException ex) {
+              LOGGER.trace("JsonProcessingException", ex);
             }
+          }
 
+        } catch (FacebookException ex) {
+          ex.printStackTrace();
         }
+      }
+    } else {
+      try {
+        ResponseList<Friend> friendResponseList = client.friends().getFriends();
+        Paging<Friend> friendPaging;
+        do {
 
-        LOGGER.info("Finished.  Cleaning up...");
-
-        LOGGER.info("Providing {} docs", providerQueue.size());
-
-        StreamsResultSet result =  new StreamsResultSet(providerQueue);
-        running.set(false);
+          for ( Friend friend : friendResponseList ) {
 
-        LOGGER.info("Exiting");
-
-        return result;
-
-    }
-
-    public StreamsResultSet readNew(BigInteger sequence) {
-        LOGGER.debug("{} readNew", STREAMS_ID);
-        throw new NotImplementedException();
-    }
-
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        LOGGER.debug("{} readRange", STREAMS_ID);
-        this.start = start;
-        this.end = end;
-        readCurrent();
-        StreamsResultSet result = (StreamsResultSet)providerQueue.iterator();
-        return result;
-    }
+            String json;
+            try {
+              json = mapper.writeValueAsString(friend);
+              providerQueue.add(
+                  new StreamsDatum(json)
+              );
+            } catch (JsonProcessingException ex) {
+              LOGGER.trace("JsonProcessingException", ex);
+            }
+          }
+          friendPaging = friendResponseList.getPaging();
+          friendResponseList = client.fetchNext(friendPaging);
+        }
+        while ( friendPaging != null
+                &&
+                friendResponseList != null );
+      } catch (FacebookException ex) {
+        ex.printStackTrace();
+      }
 
-    @Override
-    public boolean isRunning() {
-        return running.get();
     }
 
-    void shutdownAndAwaitTermination(ExecutorService pool) {
-        pool.shutdown(); // Disable new tasks from being submitted
-        try {
-            // Wait a while for existing tasks to terminate
-            if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
-                pool.shutdownNow(); // Cancel currently executing tasks
-                // Wait a while for tasks to respond to being cancelled
-                if (!pool.awaitTermination(10, TimeUnit.SECONDS))
-                    System.err.println("Pool did not terminate");
-            }
-        } catch (InterruptedException ie) {
-            // (Re-)Cancel if current thread also interrupted
-            pool.shutdownNow();
-            // Preserve interrupt status
-            Thread.currentThread().interrupt();
+    LOGGER.info("Finished.  Cleaning up...");
+
+    LOGGER.info("Providing {} docs", providerQueue.size());
+
+    StreamsResultSet result =  new StreamsResultSet(providerQueue);
+    running.set(false);
+
+    LOGGER.info("Exiting");
+
+    return result;
+
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    LOGGER.debug("{} readNew", STREAMS_ID);
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    LOGGER.debug("{} readRange", STREAMS_ID);
+    this.start = start;
+    this.end = end;
+    readCurrent();
+    StreamsResultSet result = (StreamsResultSet)providerQueue.iterator();
+    return result;
+  }
+
+  @Override
+  public boolean isRunning() {
+    return running.get();
+  }
+
+  void shutdownAndAwaitTermination(ExecutorService pool) {
+    pool.shutdown(); // Disable new tasks from being submitted
+    try {
+      // Wait a while for existing tasks to terminate
+      if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+        pool.shutdownNow(); // Cancel currently executing tasks
+        // Wait a while for tasks to respond to being cancelled
+        if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+          System.err.println("Pool did not terminate");
         }
+      }
+    } catch (InterruptedException ie) {
+      // (Re-)Cancel if current thread also interrupted
+      pool.shutdownNow();
+      // Preserve interrupt status
+      Thread.currentThread().interrupt();
     }
+  }
 
-    @Override
-    public void prepare(Object o) {
-
-        executor = MoreExecutors.listeningDecorator(newFixedThreadPoolWithQueueSize(5, 20));
+  @Override
+  public void prepare(Object configurationObject) {
 
-        Preconditions.checkNotNull(providerQueue);
-        Preconditions.checkNotNull(this.klass);
-        Preconditions.checkNotNull(facebookUserInformationConfiguration.getOauth().getAppId());
-        Preconditions.checkNotNull(facebookUserInformationConfiguration.getOauth().getAppSecret());
-        Preconditions.checkNotNull(facebookUserInformationConfiguration.getOauth().getUserAccessToken());
-        Preconditions.checkNotNull(facebookUserInformationConfiguration.getInfo());
+    executor = MoreExecutors.listeningDecorator(newFixedThreadPoolWithQueueSize(5, 20));
 
-        List<String> ids = new ArrayList<String>();
-        List<String[]> idsBatches = new ArrayList<String[]>();
+    Preconditions.checkNotNull(providerQueue);
+    Preconditions.checkNotNull(this.klass);
+    Preconditions.checkNotNull(facebookUserInformationConfiguration.getOauth().getAppId());
+    Preconditions.checkNotNull(facebookUserInformationConfiguration.getOauth().getAppSecret());
+    Preconditions.checkNotNull(facebookUserInformationConfiguration.getOauth().getUserAccessToken());
+    Preconditions.checkNotNull(facebookUserInformationConfiguration.getInfo());
 
-        for(String s : facebookUserInformationConfiguration.getInfo()) {
-            if(s != null)
-            {
-                ids.add(s);
+    List<String> ids = new ArrayList<String>();
+    List<String[]> idsBatches = new ArrayList<String[]>();
 
-                if(ids.size() >= 100) {
-                    // add the batch
-                    idsBatches.add(ids.toArray(new String[ids.size()]));
-                    // reset the Ids
-                    ids = new ArrayList<String>();
-                }
+    for (String s : facebookUserInformationConfiguration.getInfo()) {
+      if (s != null) {
+        ids.add(s);
 
-            }
+        if (ids.size() >= 100) {
+          // add the batch
+          idsBatches.add(ids.toArray(new String[ids.size()]));
+          // reset the Ids
+          ids = new ArrayList<String>();
         }
 
-        if(ids.size() > 0)
-            idsBatches.add(ids.toArray(new String[ids.size()]));
-
-        this.idsBatches = idsBatches.iterator();
+      }
     }
 
-    protected Facebook getFacebookClient()
-    {
-        ConfigurationBuilder cb = new ConfigurationBuilder();
-        cb.setDebugEnabled(true)
-            .setOAuthAppId(facebookUserInformationConfiguration.getOauth().getAppId())
-            .setOAuthAppSecret(facebookUserInformationConfiguration.getOauth().getAppSecret())
-            .setOAuthAccessToken(facebookUserInformationConfiguration.getOauth().getUserAccessToken())
-            .setOAuthPermissions(ALL_PERMISSIONS)
-            .setJSONStoreEnabled(true)
-            .setClientVersion("v1.0");
-
-        FacebookFactory ff = new FacebookFactory(cb.build());
-        Facebook facebook = ff.getInstance();
-
-        return facebook;
+    if (ids.size() > 0) {
+      idsBatches.add(ids.toArray(new String[ids.size()]));
     }
 
-    @Override
-    public void cleanUp() {
-        shutdownAndAwaitTermination(executor);
-    }
+    this.idsBatches = idsBatches.iterator();
+  }
+
+  protected Facebook getFacebookClient() {
+    ConfigurationBuilder cb = new ConfigurationBuilder();
+    cb.setDebugEnabled(true)
+        .setOAuthAppId(facebookUserInformationConfiguration.getOauth().getAppId())
+        .setOAuthAppSecret(facebookUserInformationConfiguration.getOauth().getAppSecret())
+        .setOAuthAccessToken(facebookUserInformationConfiguration.getOauth().getUserAccessToken())
+        .setOAuthPermissions(ALL_PERMISSIONS)
+        .setJSONStoreEnabled(true)
+        .setClientVersion("v1.0");
+
+    FacebookFactory ff = new FacebookFactory(cb.build());
+    Facebook facebook = ff.getInstance();
+
+    return facebook;
+  }
+
+  @Override
+  public void cleanUp() {
+    shutdownAndAwaitTermination(executor);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookUserstreamProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookUserstreamProvider.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookUserstreamProvider.java
index 0f2121a..b292d30 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookUserstreamProvider.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/FacebookUserstreamProvider.java
@@ -18,18 +18,6 @@
 
 package org.apache.streams.facebook.provider;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Queues;
-import com.google.common.collect.Sets;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigRenderOptions;
-import facebook4j.*;
-import facebook4j.Post;
-import facebook4j.conf.ConfigurationBuilder;
-import facebook4j.json.DataObjectFactory;
 import org.apache.streams.config.StreamsConfigurator;
 import org.apache.streams.core.DatumStatusCounter;
 import org.apache.streams.core.StreamsDatum;
@@ -39,10 +27,20 @@ import org.apache.streams.facebook.FacebookUserInformationConfiguration;
 import org.apache.streams.facebook.FacebookUserstreamConfiguration;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.util.ComponentUtils;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Queues;
+import com.google.common.collect.Sets;
+import com.google.common.util.concurrent.ListeningExecutorService;
+import com.google.common.util.concurrent.MoreExecutors;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigRenderOptions;
+
+import org.apache.commons.lang.NotImplementedException;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.NotImplementedException;
 
 import java.io.IOException;
 import java.io.Serializable;
@@ -51,276 +49,306 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Queue;
 import java.util.Set;
-import java.util.concurrent.*;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
+import facebook4j.Facebook;
+import facebook4j.FacebookException;
+import facebook4j.FacebookFactory;
+import facebook4j.Post;
+import facebook4j.ResponseList;
+import facebook4j.conf.ConfigurationBuilder;
+import facebook4j.json.DataObjectFactory;
+
 public class FacebookUserstreamProvider implements StreamsProvider, Serializable {
 
-    public static final String STREAMS_ID = "FacebookUserstreamProvider";
+  public static final String STREAMS_ID = "FacebookUserstreamProvider";
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(FacebookUserstreamProvider.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookUserstreamProvider.class);
 
-    private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    private static final String ALL_PERMISSIONS = "read_stream";
-    private FacebookUserstreamConfiguration configuration;
+  private static final String ALL_PERMISSIONS = "read_stream";
+  private FacebookUserstreamConfiguration configuration;
 
-    private Class klass;
-    protected final ReadWriteLock lock = new ReentrantReadWriteLock();
+  private Class klass;
+  protected final ReadWriteLock lock = new ReentrantReadWriteLock();
 
-    protected volatile Queue<StreamsDatum> providerQueue = new LinkedBlockingQueue<StreamsDatum>();
+  protected volatile Queue<StreamsDatum> providerQueue = new LinkedBlockingQueue<StreamsDatum>();
 
-    public FacebookUserstreamConfiguration getConfig() {
-        return configuration;
-    }
+  public FacebookUserstreamConfiguration getConfig() {
+    return configuration;
+  }
 
-    public void setConfig(FacebookUserstreamConfiguration config) {
-        this.configuration = config;
-    }
+  public void setConfig(FacebookUserstreamConfiguration config) {
+    this.configuration = config;
+  }
 
-    protected ListeningExecutorService executor;
+  protected ListeningExecutorService executor;
 
-    protected DateTime start;
-    protected DateTime end;
+  protected DateTime start;
+  protected DateTime end;
 
-    protected final AtomicBoolean running = new AtomicBoolean();
+  protected final AtomicBoolean running = new AtomicBoolean();
 
-    private DatumStatusCounter countersCurrent = new DatumStatusCounter();
-    private DatumStatusCounter countersTotal = new DatumStatusCounter();
+  private DatumStatusCounter countersCurrent = new DatumStatusCounter();
+  private DatumStatusCounter countersTotal = new DatumStatusCounter();
 
-    protected Facebook client;
+  protected Facebook client;
 
-    private static ExecutorService newFixedThreadPoolWithQueueSize(int nThreads, int queueSize) {
-        return new ThreadPoolExecutor(nThreads, nThreads,
-                5000L, TimeUnit.MILLISECONDS,
-                new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
-    }
+  private static ExecutorService newFixedThreadPoolWithQueueSize(int numThreads, int queueSize) {
+    return new ThreadPoolExecutor(numThreads, numThreads,
+        5000L, TimeUnit.MILLISECONDS,
+        new ArrayBlockingQueue<Runnable>(queueSize, true), new ThreadPoolExecutor.CallerRunsPolicy());
+  }
 
-    public FacebookUserstreamProvider() {
-        Config config = StreamsConfigurator.config.getConfig("facebook");
-        FacebookUserInformationConfiguration facebookUserInformationConfiguration;
-        try {
-            facebookUserInformationConfiguration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
-        } catch (IOException e) {
-            e.printStackTrace();
-            return;
-        }
+  /**
+   * FacebookUserstreamProvider constructor.
+   */
+  public FacebookUserstreamProvider() {
+    Config config = StreamsConfigurator.config.getConfig("facebook");
+    FacebookUserInformationConfiguration facebookUserInformationConfiguration;
+    try {
+      facebookUserInformationConfiguration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
+    } catch (IOException ex) {
+      ex.printStackTrace();
+      return;
     }
-
-    public FacebookUserstreamProvider(FacebookUserstreamConfiguration config) {
-        this.configuration = config;
+  }
+
+  /**
+   * FacebookUserstreamProvider constructor.
+   * @param config config
+   */
+  public FacebookUserstreamProvider(FacebookUserstreamConfiguration config) {
+    this.configuration = config;
+  }
+
+  /**
+   * FacebookUserstreamProvider constructor.
+   * @param klass output Class
+   */
+  public FacebookUserstreamProvider(Class klass) {
+    Config config = StreamsConfigurator.config.getConfig("facebook");
+    FacebookUserInformationConfiguration facebookUserInformationConfiguration;
+    try {
+      facebookUserInformationConfiguration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
+    } catch (IOException ex) {
+      ex.printStackTrace();
+      return;
     }
-
-    public FacebookUserstreamProvider(Class klass) {
-        Config config = StreamsConfigurator.config.getConfig("facebook");
-        FacebookUserInformationConfiguration facebookUserInformationConfiguration;
-        try {
-            facebookUserInformationConfiguration = mapper.readValue(config.root().render(ConfigRenderOptions.concise()), FacebookUserInformationConfiguration.class);
-        } catch (IOException e) {
-            e.printStackTrace();
-            return;
-        }
-        this.klass = klass;
+    this.klass = klass;
+  }
+
+  public FacebookUserstreamProvider(FacebookUserstreamConfiguration config, Class klass) {
+    this.configuration = config;
+    this.klass = klass;
+  }
+
+  public Queue<StreamsDatum> getProviderQueue() {
+    return this.providerQueue;
+  }
+
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
+
+  @Override
+  public void startStream() {
+
+    client = getFacebookClient();
+
+    if ( configuration.getInfo() != null
+         &&
+         configuration.getInfo().size() > 0 ) {
+      for ( String id : configuration.getInfo()) {
+        executor.submit(new FacebookFeedPollingTask(this, id));
+      }
+      running.set(true);
+    } else {
+      try {
+        String id = client.getMe().getId();
+        executor.submit(new FacebookFeedPollingTask(this, id));
+        running.set(true);
+      } catch (FacebookException ex) {
+        LOGGER.error(ex.getMessage());
+        running.set(false);
+      }
     }
+  }
 
-    public FacebookUserstreamProvider(FacebookUserstreamConfiguration config, Class klass) {
-        this.configuration = config;
-        this.klass = klass;
-    }
+  @Override
+  public StreamsResultSet readCurrent() {
 
-    public Queue<StreamsDatum> getProviderQueue() {
-        return this.providerQueue;
-    }
+    StreamsResultSet current;
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
+    synchronized (FacebookUserstreamProvider.class) {
+      current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(providerQueue));
+      current.setCounter(new DatumStatusCounter());
+      current.getCounter().add(countersCurrent);
+      countersTotal.add(countersCurrent);
+      countersCurrent = new DatumStatusCounter();
+      providerQueue.clear();
     }
 
-    @Override
-    public void startStream() {
-
-        client = getFacebookClient();
-
-        if( configuration.getInfo() != null &&
-            configuration.getInfo().size() > 0 ) {
-            for( String id : configuration.getInfo()) {
-                executor.submit(new FacebookFeedPollingTask(this, id));
-            }
-            running.set(true);
-        } else {
-            try {
-                String id = client.getMe().getId();
-                executor.submit(new FacebookFeedPollingTask(this, id));
-                running.set(true);
-            } catch (FacebookException e) {
-                LOGGER.error(e.getMessage());
-                running.set(false);
-            }
+    return current;
+
+  }
+
+  @Override
+  public StreamsResultSet readNew(BigInteger sequence) {
+    LOGGER.debug("{} readNew", STREAMS_ID);
+    throw new NotImplementedException();
+  }
+
+  @Override
+  public StreamsResultSet readRange(DateTime start, DateTime end) {
+    LOGGER.debug("{} readRange", STREAMS_ID);
+    this.start = start;
+    this.end = end;
+    readCurrent();
+    StreamsResultSet result = (StreamsResultSet) providerQueue.iterator();
+    return result;
+  }
+
+  @Override
+  public boolean isRunning() {
+    return running.get();
+  }
+
+  void shutdownAndAwaitTermination(ExecutorService pool) {
+    pool.shutdown(); // Disable new tasks from being submitted
+    try {
+      // Wait a while for existing tasks to terminate
+      if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+        pool.shutdownNow(); // Cancel currently executing tasks
+        // Wait a while for tasks to respond to being cancelled
+        if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
+          System.err.println("Pool did not terminate");
         }
+      }
+    } catch (InterruptedException ie) {
+      // (Re-)Cancel if current thread also interrupted
+      pool.shutdownNow();
+      // Preserve interrupt status
+      Thread.currentThread().interrupt();
     }
+  }
 
-    public StreamsResultSet readCurrent() {
+  @Override
+  public void prepare(Object configurationObject) {
 
-        StreamsResultSet current;
+    executor = MoreExecutors.listeningDecorator(newFixedThreadPoolWithQueueSize(5, 20));
 
-        synchronized (FacebookUserstreamProvider.class) {
-            current = new StreamsResultSet(Queues.newConcurrentLinkedQueue(providerQueue));
-            current.setCounter(new DatumStatusCounter());
-            current.getCounter().add(countersCurrent);
-            countersTotal.add(countersCurrent);
-            countersCurrent = new DatumStatusCounter();
-            providerQueue.clear();
-        }
+    Preconditions.checkNotNull(providerQueue);
+    Preconditions.checkNotNull(this.klass);
+    Preconditions.checkNotNull(configuration.getOauth().getAppId());
+    Preconditions.checkNotNull(configuration.getOauth().getAppSecret());
+    Preconditions.checkNotNull(configuration.getOauth().getUserAccessToken());
 
-        return current;
+    client = getFacebookClient();
 
-    }
+    if ( configuration.getInfo() != null
+         &&
+         configuration.getInfo().size() > 0 ) {
 
-    public StreamsResultSet readNew(BigInteger sequence) {
-        LOGGER.debug("{} readNew", STREAMS_ID);
-        throw new NotImplementedException();
-    }
+      List<String> ids = new ArrayList<String>();
+      List<String[]> idsBatches = new ArrayList<String[]>();
 
-    public StreamsResultSet readRange(DateTime start, DateTime end) {
-        LOGGER.debug("{} readRange", STREAMS_ID);
-        this.start = start;
-        this.end = end;
-        readCurrent();
-        StreamsResultSet result = (StreamsResultSet) providerQueue.iterator();
-        return result;
-    }
+      for (String s : configuration.getInfo()) {
+        if (s != null) {
+          ids.add(s);
 
-    @Override
-    public boolean isRunning() {
-        return running.get();
-    }
+          if (ids.size() >= 100) {
+            // add the batch
+            idsBatches.add(ids.toArray(new String[ids.size()]));
+            // reset the Ids
+            ids = new ArrayList<String>();
+          }
 
-    void shutdownAndAwaitTermination(ExecutorService pool) {
-        pool.shutdown(); // Disable new tasks from being submitted
-        try {
-            // Wait a while for existing tasks to terminate
-            if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
-                pool.shutdownNow(); // Cancel currently executing tasks
-                // Wait a while for tasks to respond to being cancelled
-                if (!pool.awaitTermination(10, TimeUnit.SECONDS))
-                    System.err.println("Pool did not terminate");
-            }
-        } catch (InterruptedException ie) {
-            // (Re-)Cancel if current thread also interrupted
-            pool.shutdownNow();
-            // Preserve interrupt status
-            Thread.currentThread().interrupt();
         }
+      }
     }
+  }
 
-    @Override
-    public void prepare(Object o) {
-
-        executor = MoreExecutors.listeningDecorator(newFixedThreadPoolWithQueueSize(5, 20));
+  protected Facebook getFacebookClient() {
+    ConfigurationBuilder cb = new ConfigurationBuilder();
+    cb.setDebugEnabled(true)
+        .setOAuthAppId(configuration.getOauth().getAppId())
+        .setOAuthAppSecret(configuration.getOauth().getAppSecret())
+        .setOAuthAccessToken(configuration.getOauth().getUserAccessToken())
+        .setOAuthPermissions(ALL_PERMISSIONS)
+        .setJSONStoreEnabled(true);
 
-        Preconditions.checkNotNull(providerQueue);
-        Preconditions.checkNotNull(this.klass);
-        Preconditions.checkNotNull(configuration.getOauth().getAppId());
-        Preconditions.checkNotNull(configuration.getOauth().getAppSecret());
-        Preconditions.checkNotNull(configuration.getOauth().getUserAccessToken());
+    FacebookFactory ff = new FacebookFactory(cb.build());
+    Facebook facebook = ff.getInstance();
 
-        client = getFacebookClient();
+    return facebook;
+  }
 
-        if( configuration.getInfo() != null &&
-            configuration.getInfo().size() > 0 ) {
+  @Override
+  public void cleanUp() {
+    shutdownAndAwaitTermination(executor);
+  }
 
-            List<String> ids = new ArrayList<String>();
-            List<String[]> idsBatches = new ArrayList<String[]>();
+  private class FacebookFeedPollingTask implements Runnable {
 
-            for (String s : configuration.getInfo()) {
-                if (s != null) {
-                    ids.add(s);
+    FacebookUserstreamProvider provider;
+    Facebook client;
+    String id;
 
-                    if (ids.size() >= 100) {
-                        // add the batch
-                        idsBatches.add(ids.toArray(new String[ids.size()]));
-                        // reset the Ids
-                        ids = new ArrayList<String>();
-                    }
+    private Set<Post> priorPollResult = Sets.newHashSet();
 
-                }
-            }
-        }
+    public FacebookFeedPollingTask(FacebookUserstreamProvider facebookUserstreamProvider) {
+      this.provider = facebookUserstreamProvider;
     }
 
-    protected Facebook getFacebookClient() {
-        ConfigurationBuilder cb = new ConfigurationBuilder();
-        cb.setDebugEnabled(true)
-                .setOAuthAppId(configuration.getOauth().getAppId())
-                .setOAuthAppSecret(configuration.getOauth().getAppSecret())
-                .setOAuthAccessToken(configuration.getOauth().getUserAccessToken())
-                .setOAuthPermissions(ALL_PERMISSIONS)
-                .setJSONStoreEnabled(true);
-
-        FacebookFactory ff = new FacebookFactory(cb.build());
-        Facebook facebook = ff.getInstance();
-
-        return facebook;
+    public FacebookFeedPollingTask(FacebookUserstreamProvider facebookUserstreamProvider, String id) {
+      this.provider = facebookUserstreamProvider;
+      this.client = provider.client;
+      this.id = id;
     }
 
     @Override
-    public void cleanUp() {
-        shutdownAndAwaitTermination(executor);
-    }
-
-    private class FacebookFeedPollingTask implements Runnable {
-
-        FacebookUserstreamProvider provider;
-        Facebook client;
-        String id;
-
-        private Set<Post> priorPollResult = Sets.newHashSet();
-
-        public FacebookFeedPollingTask(FacebookUserstreamProvider facebookUserstreamProvider) {
-            this.provider = facebookUserstreamProvider;
-        }
-
-        public FacebookFeedPollingTask(FacebookUserstreamProvider facebookUserstreamProvider, String id) {
-            this.provider = facebookUserstreamProvider;
-            this.client = provider.client;
-            this.id = id;
-        }
-        @Override
-        public void run() {
-            while (provider.isRunning()) {
-                ResponseList<Post> postResponseList;
-                try {
-                    postResponseList = client.getFeed(id);
-
-                    Set<Post> update = Sets.newHashSet(postResponseList);
-                    Set<Post> repeats = Sets.intersection(priorPollResult, Sets.newHashSet(update));
-                    Set<Post> entrySet = Sets.difference(update, repeats);
-                    LOGGER.debug(this.id + " response: " + update.size() + " previous: " + repeats.size() + " new: " + entrySet.size());
-                    for (Post item : entrySet) {
-                        String json = DataObjectFactory.getRawJSON(item);
-                        org.apache.streams.facebook.Post post = mapper.readValue(json, org.apache.streams.facebook.Post.class);
-                        try {
-                            lock.readLock().lock();
-                            ComponentUtils.offerUntilSuccess(new StreamsDatum(post), providerQueue);
-                            countersCurrent.incrementAttempt();
-                        } finally {
-                            lock.readLock().unlock();
-                        }
-                    }
-                    priorPollResult = update;
-                } catch (Exception e) {
-                    e.printStackTrace();
-                } finally {
-                    try {
-                        Thread.sleep(configuration.getPollIntervalMillis());
-                    } catch (InterruptedException e) {
-                        Thread.currentThread().interrupt();
-                    }
-                }
+    public void run() {
+      while (provider.isRunning()) {
+        ResponseList<Post> postResponseList;
+        try {
+          postResponseList = client.getFeed(id);
+
+          Set<Post> update = Sets.newHashSet(postResponseList);
+          Set<Post> repeats = Sets.intersection(priorPollResult, Sets.newHashSet(update));
+          Set<Post> entrySet = Sets.difference(update, repeats);
+          LOGGER.debug(this.id + " response: " + update.size() + " previous: " + repeats.size() + " new: " + entrySet.size());
+          for (Post item : entrySet) {
+            String json = DataObjectFactory.getRawJSON(item);
+            org.apache.streams.facebook.Post post = mapper.readValue(json, org.apache.streams.facebook.Post.class);
+            try {
+              lock.readLock().lock();
+              ComponentUtils.offerUntilSuccess(new StreamsDatum(post), providerQueue);
+              countersCurrent.incrementAttempt();
+            } finally {
+              lock.readLock().unlock();
             }
+          }
+          priorPollResult = update;
+        } catch (Exception ex) {
+          ex.printStackTrace();
+        } finally {
+          try {
+            Thread.sleep(configuration.getPollIntervalMillis());
+          } catch (InterruptedException interrupt) {
+            Thread.currentThread().interrupt();
+          }
         }
+      }
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/page/FacebookPageDataCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/page/FacebookPageDataCollector.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/page/FacebookPageDataCollector.java
index 0e88dd4..68d8f06 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/page/FacebookPageDataCollector.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/page/FacebookPageDataCollector.java
@@ -20,7 +20,6 @@ package org.apache.streams.facebook.provider.page;
 
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.facebook.FacebookConfiguration;
-import org.apache.streams.facebook.FacebookPageProviderConfiguration;
 import org.apache.streams.facebook.IdConfig;
 import org.apache.streams.facebook.provider.FacebookDataCollector;
 import org.apache.streams.jackson.StreamsJacksonMapper;
@@ -28,7 +27,6 @@ import org.apache.streams.jackson.StreamsJacksonMapper;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.annotations.VisibleForTesting;
 
-import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -36,11 +34,10 @@ import java.util.concurrent.BlockingQueue;
 
 import facebook4j.FacebookException;
 import facebook4j.Page;
-import facebook4j.Reading;
 import facebook4j.json.DataObjectFactory;
 
 /**
- * Collects the page data from public Facebook pages
+ * Collects the page data from public Facebook pages.
  */
 public class FacebookPageDataCollector extends FacebookDataCollector {
 
@@ -48,11 +45,8 @@ public class FacebookPageDataCollector extends FacebookDataCollector {
   private static final int MAX_ATTEMPTS = 5;
   private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
 
-  private String fields;
-
-  public FacebookPageDataCollector(BlockingQueue<StreamsDatum> queue, FacebookPageProviderConfiguration configuration) {
+  public FacebookPageDataCollector(BlockingQueue<StreamsDatum> queue, FacebookConfiguration configuration) {
     super(configuration, queue);
-    fields = StringUtils.join(configuration.getFields(), ',');
   }
 
   @Override
@@ -70,7 +64,7 @@ public class FacebookPageDataCollector extends FacebookDataCollector {
     while (attempt < MAX_ATTEMPTS) {
       ++attempt;
       try {
-        Page page = getNextFacebookClient().getPage(pageId, new Reading().fields(fields));
+        Page page = getNextFacebookClient().getPage(pageId);
         return page;
       } catch (FacebookException fe) {
         LOGGER.error("Facebook returned an exception : {}", fe);

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/page/FacebookPageProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/page/FacebookPageProvider.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/page/FacebookPageProvider.java
index d11a486..e7bbcfa 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/page/FacebookPageProvider.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/page/FacebookPageProvider.java
@@ -80,9 +80,8 @@ public class FacebookPageProvider extends FacebookProvider {
 
   private FacebookPageProviderConfiguration configuration;
 
-  public FacebookPageProvider(FacebookPageProviderConfiguration facebookConfiguration) {
+  public FacebookPageProvider(FacebookConfiguration facebookConfiguration) {
     super(facebookConfiguration);
-    configuration = facebookConfiguration;
   }
 
   @VisibleForTesting
@@ -92,7 +91,7 @@ public class FacebookPageProvider extends FacebookProvider {
 
   @Override
   protected FacebookDataCollector getDataCollector() {
-    return new FacebookPageDataCollector(super.datums, configuration);
+    return new FacebookPageDataCollector(super.datums, super.configuration);
   }
 
   /**
@@ -115,7 +114,7 @@ public class FacebookPageProvider extends FacebookProvider {
     Config typesafe  = conf.withFallback(reference).resolve();
 
     StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-    FacebookPageProviderConfiguration config = new ComponentConfigurator<>(FacebookPageProviderConfiguration.class).detectConfiguration(typesafe, "facebook");
+    FacebookConfiguration config = new ComponentConfigurator<>(FacebookConfiguration.class).detectConfiguration(typesafe, "facebook");
     FacebookPageProvider provider = new FacebookPageProvider(config);
 
     PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/pagefeed/FacebookPageFeedDataCollector.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/pagefeed/FacebookPageFeedDataCollector.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/pagefeed/FacebookPageFeedDataCollector.java
index c2ba700..f509170 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/pagefeed/FacebookPageFeedDataCollector.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/pagefeed/FacebookPageFeedDataCollector.java
@@ -15,116 +15,124 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.facebook.provider.pagefeed;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import facebook4j.*;
-import facebook4j.json.DataObjectFactory;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.facebook.FacebookConfiguration;
 import org.apache.streams.facebook.IdConfig;
 import org.apache.streams.facebook.provider.FacebookDataCollector;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.concurrent.BlockingQueue;
 
+import facebook4j.FacebookException;
+import facebook4j.Paging;
+import facebook4j.Post;
+import facebook4j.Reading;
+import facebook4j.ResponseList;
+import facebook4j.json.DataObjectFactory;
+
 /**
- * Collects the page feed data from public Facebook pages
+ * Collects the page feed data from public Facebook pages.
  */
 public class FacebookPageFeedDataCollector extends FacebookDataCollector {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(FacebookPageFeedDataCollector.class);
-    private static final int MAX_ATTEMPTS = 5;
-    private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
-    private static final int LIMIT = 100;
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookPageFeedDataCollector.class);
+  private static final int MAX_ATTEMPTS = 5;
+  private static final ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+  private static final int LIMIT = 100;
 
-    public FacebookPageFeedDataCollector(BlockingQueue<StreamsDatum> queue, FacebookConfiguration configuration) {
-        super(configuration, queue);
-    }
+  public FacebookPageFeedDataCollector(BlockingQueue<StreamsDatum> queue, FacebookConfiguration configuration) {
+    super(configuration, queue);
+  }
+
+  @Override
+  protected void getData(IdConfig id) throws Exception {
+    boolean exit = false;
 
-    @Override
-    protected void getData(IdConfig id) throws Exception {
-        boolean exit = false;
+    ResponseList<Post> facebookPosts = getPosts(id.getId());
+    LOGGER.debug("Post received : {}", facebookPosts.size());
+    backOff.reset();
+    do {
+      for (Post post : facebookPosts) {
+        if (id.getBeforeDate() != null && id.getAfterDate() != null) {
+          if (id.getBeforeDate().isAfter(post.getCreatedTime().getTime())
+              && id.getAfterDate().isBefore(post.getCreatedTime().getTime())) {
+            super.outputData(MAPPER.readValue(DataObjectFactory.getRawJSON(post), org.apache.streams.facebook.Post.class), post.getId());
 
-        ResponseList<Post> facebookPosts = getPosts(id.getId());
-        LOGGER.debug("Post received : {}", facebookPosts.size());
+          }
+        } else if (id.getBeforeDate() != null && id.getBeforeDate().isAfter(post.getCreatedTime().getTime())) {
+          super.outputData(MAPPER.readValue(DataObjectFactory.getRawJSON(post), org.apache.streams.facebook.Post.class), post.getId());
+        } else if (id.getAfterDate() != null && id.getAfterDate().isBefore(post.getCreatedTime().getTime())) {
+          super.outputData(MAPPER.readValue(DataObjectFactory.getRawJSON(post), org.apache.streams.facebook.Post.class), post.getId());
+        } else if (id.getBeforeDate() == null && id.getAfterDate() == null) {
+          super.outputData(MAPPER.readValue(DataObjectFactory.getRawJSON(post), org.apache.streams.facebook.Post.class), post.getId());
+        } else {
+          exit = true;
+          LOGGER.debug("Breaking on post, {}, with createdAtDate {}", post.getId(), post.getCreatedTime());
+          break;
+        }
+      }
+      if (facebookPosts.getPaging() != null && !exit) {
+        LOGGER.debug("Paging. . .");
+        facebookPosts = getPosts(facebookPosts.getPaging());
         backOff.reset();
-        do {
-            for(Post post : facebookPosts) {
-                if(id.getBeforeDate() != null && id.getAfterDate() != null) {
-                    if(id.getBeforeDate().isAfter(post.getCreatedTime().getTime())
-                            && id.getAfterDate().isBefore(post.getCreatedTime().getTime())) {
-                        super.outputData(MAPPER.readValue(DataObjectFactory.getRawJSON(post), org.apache.streams.facebook.Post.class), post.getId());
-
-                    }
-                } else if(id.getBeforeDate() != null && id.getBeforeDate().isAfter(post.getCreatedTime().getTime())) {
-                    super.outputData(MAPPER.readValue(DataObjectFactory.getRawJSON(post), org.apache.streams.facebook.Post.class), post.getId());
-                } else if(id.getAfterDate() != null && id.getAfterDate().isBefore(post.getCreatedTime().getTime())) {
-                    super.outputData(MAPPER.readValue(DataObjectFactory.getRawJSON(post), org.apache.streams.facebook.Post.class), post.getId());
-                } else if(id.getBeforeDate() == null && id.getAfterDate() == null) {
-                    super.outputData(MAPPER.readValue(DataObjectFactory.getRawJSON(post), org.apache.streams.facebook.Post.class), post.getId());
-                } else {
-                    exit = true;
-                    LOGGER.debug("Breaking on post, {}, with createdAtDate {}", post.getId(), post.getCreatedTime());
-                    break;
-                }
-            }
-            if(facebookPosts.getPaging() != null && !exit) {
-                LOGGER.debug("Paging. . .");
-                facebookPosts = getPosts(facebookPosts.getPaging());
-                backOff.reset();
-                LOGGER.debug("Paging received {} posts*", facebookPosts.size());
-            } else {
-                LOGGER.debug("No more paging.");
-                facebookPosts = null;
-            }
-        } while(facebookPosts != null && facebookPosts.size() != 0);
+        LOGGER.debug("Paging received {} posts*", facebookPosts.size());
+      } else {
+        LOGGER.debug("No more paging.");
+        facebookPosts = null;
+      }
+    }
+    while (facebookPosts != null && facebookPosts.size() != 0);
 
+  }
 
-    }
+  private ResponseList<Post> getPosts(Paging<Post> paging) throws Exception {
+    return getPosts(null, paging);
+  }
 
-    private ResponseList<Post> getPosts(Paging<Post> paging) throws Exception{
-        return getPosts(null, paging);
-    }
+  private ResponseList<Post> getPosts(String pageId) throws Exception {
+    return getPosts(pageId, null);
+  }
 
-    private ResponseList<Post> getPosts(String pageId) throws Exception {
-        return getPosts(pageId, null);
-    }
+  /**
+   * Queries facebook.  Attempts requests up to 5 times and backs off on each facebook exception.
+   * @param pageId pageId
+   * @param paging paging
+   * @return ResponseList of {@link facebook4j.Post}
+   * @throws Exception Exception
+   */
+  private ResponseList<Post> getPosts(String pageId, Paging<Post> paging) throws Exception {
+    int attempt = 0;
+    while (attempt < MAX_ATTEMPTS) {
+      ++attempt;
+      try {
+        if (pageId != null) {
+          Reading reading = new Reading();
+          reading.limit(LIMIT);
+          return getNextFacebookClient().getPosts(pageId, reading);
+        } else {
+          return getNextFacebookClient().fetchNext(paging);
+        }
+      } catch (FacebookException fe) {
+        LOGGER.error("Facebook returned an exception : {}", fe);
+        LOGGER.error("Facebook returned an exception while trying to get feed for page, {} : {}", pageId, fe.getMessage());
+        //TODO Rate limit exceptions with facebook4j unclear http://facebook4j.org/oldjavadocs/1.1.12-2.0.0/2.0.0/index.html?facebook4j/internal/http/HttpResponseCode.html
+        // back off at all exceptions until figured out.
+        int errorCode = fe.getErrorCode();
 
-    /**
-     * Queries facebook.  Attempts requests up to 5 times and backs off on each facebook exception.
-     * @param pageId
-     * @param paging
-     * @return
-     * @throws Exception
-     */
-    private ResponseList<Post> getPosts(String pageId, Paging<Post> paging) throws Exception {
-        int attempt = 0;
-        while(attempt < MAX_ATTEMPTS) {
-            ++attempt;
-            try {
-                if (pageId != null) {
-                    Reading reading = new Reading();
-                    reading.limit(LIMIT);
-                    return getNextFacebookClient().getPosts(pageId, reading);
-                }
-                else
-                    return getNextFacebookClient().fetchNext(paging);
-            } catch (FacebookException fe) {
-                LOGGER.error("Facebook returned an exception : {}", fe);
-                LOGGER.error("Facebook returned an exception while trying to get feed for page, {} : {}", pageId, fe.getMessage());
-                //TODO Rate limit exceptions with facebook4j unclear http://facebook4j.org/oldjavadocs/1.1.12-2.0.0/2.0.0/index.html?facebook4j/internal/http/HttpResponseCode.html
-                // back off at all exceptions until figured out.
-                int errorCode = fe.getErrorCode();
-
-                //Some sort of rate limiting
-                if(errorCode == 17 || errorCode == 4 || errorCode == 341) {
-                    super.backOff.backOff();
-                }
-            }
+        //Some sort of rate limiting
+        if (errorCode == 17 || errorCode == 4 || errorCode == 341) {
+          super.backOff.backOff();
         }
-        throw new Exception("Failed to get data from facebook after "+MAX_ATTEMPTS);
+      }
     }
+    throw new Exception("Failed to get data from facebook after " + MAX_ATTEMPTS);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/pagefeed/FacebookPageFeedProvider.java
----------------------------------------------------------------------
diff --git a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/pagefeed/FacebookPageFeedProvider.java b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/pagefeed/FacebookPageFeedProvider.java
index 308b129..5d977e0 100644
--- a/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/pagefeed/FacebookPageFeedProvider.java
+++ b/streams-contrib/streams-provider-facebook/src/main/java/org/apache/streams/facebook/provider/pagefeed/FacebookPageFeedProvider.java
@@ -15,15 +15,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.facebook.provider.pagefeed;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import com.google.common.util.concurrent.Uninterruptibles;
-import com.typesafe.config.Config;
-import com.typesafe.config.ConfigFactory;
-import com.typesafe.config.ConfigParseOptions;
 import org.apache.streams.config.ComponentConfigurator;
 import org.apache.streams.config.StreamsConfiguration;
 import org.apache.streams.config.StreamsConfigurator;
@@ -33,6 +27,15 @@ import org.apache.streams.facebook.provider.FacebookDataCollector;
 import org.apache.streams.facebook.provider.FacebookProvider;
 import org.apache.streams.facebook.provider.page.FacebookPageProvider;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Preconditions;
+import com.google.common.util.concurrent.Uninterruptibles;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import com.typesafe.config.ConfigParseOptions;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -44,65 +47,71 @@ import java.util.Iterator;
 import java.util.concurrent.TimeUnit;
 
 /**
- *
+ * FacebookPageFeedProvider provides content from facebook public page.
  */
 public class FacebookPageFeedProvider extends FacebookProvider {
 
-    public static final String STREAMS_ID = "FacebookPageFeedProvider";
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(FacebookPageProvider.class);
-
-    private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
-
-    public FacebookPageFeedProvider() {
-        super();
-    }
-
-    public FacebookPageFeedProvider(FacebookConfiguration config) {
-        super(config);
-    }
-
-    @Override
-    protected FacebookDataCollector getDataCollector() {
-        return new FacebookPageFeedDataCollector(super.datums, super.configuration);
-    }
-
-    public static void main(String[] args) throws Exception {
-
-        Preconditions.checkArgument(args.length >= 2);
-
-        String configfile = args[0];
-        String outfile = args[1];
-
-        Config reference = ConfigFactory.load();
-        File conf_file = new File(configfile);
-        assert(conf_file.exists());
-        Config conf = ConfigFactory.parseFileAnySyntax(conf_file, ConfigParseOptions.defaults().setAllowMissing(false));
-
-        Config typesafe  = conf.withFallback(reference).resolve();
-
-        StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
-        FacebookConfiguration config = new ComponentConfigurator<>(FacebookConfiguration.class).detectConfiguration(typesafe, "facebook");
-        FacebookPageFeedProvider provider = new FacebookPageFeedProvider(config);
-
-        PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
-        provider.prepare(config);
-        provider.startStream();
-        do {
-            Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
-            Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
-            while(iterator.hasNext()) {
-                StreamsDatum datum = iterator.next();
-                String json;
-                try {
-                    json = MAPPER.writeValueAsString(datum.getDocument());
-                    outStream.println(json);
-                } catch (JsonProcessingException e) {
-                    System.err.println(e.getMessage());
-                }
-            }
-        } while( provider.isRunning());
-        provider.cleanUp();
-        outStream.flush();
+  public static final String STREAMS_ID = "FacebookPageFeedProvider";
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(FacebookPageProvider.class);
+
+  private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  public FacebookPageFeedProvider() {
+    super();
+  }
+
+  public FacebookPageFeedProvider(FacebookConfiguration config) {
+    super(config);
+  }
+
+  @Override
+  protected FacebookDataCollector getDataCollector() {
+    return new FacebookPageFeedDataCollector(super.datums, super.configuration);
+  }
+
+  /**
+   * Run FacebookPageFeedProvider from command line.
+   * @param args configfile outfile
+   * @throws Exception Exception
+   */
+  public static void main(String[] args) throws Exception {
+
+    Preconditions.checkArgument(args.length >= 2);
+
+    String configfile = args[0];
+    String outfile = args[1];
+
+    Config reference = ConfigFactory.load();
+    File confFile = new File(configfile);
+    assert (confFile.exists());
+    Config conf = ConfigFactory.parseFileAnySyntax(confFile, ConfigParseOptions.defaults().setAllowMissing(false));
+
+    Config typesafe  = conf.withFallback(reference).resolve();
+
+    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
+    FacebookConfiguration config = new ComponentConfigurator<>(FacebookConfiguration.class).detectConfiguration(typesafe, "facebook");
+    FacebookPageFeedProvider provider = new FacebookPageFeedProvider(config);
+
+    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
+    provider.prepare(config);
+    provider.startStream();
+    do {
+      Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(), TimeUnit.MILLISECONDS);
+      Iterator<StreamsDatum> iterator = provider.readCurrent().iterator();
+      while (iterator.hasNext()) {
+        StreamsDatum datum = iterator.next();
+        String json;
+        try {
+          json = MAPPER.writeValueAsString(datum.getDocument());
+          outStream.println(json);
+        } catch (JsonProcessingException ex) {
+          System.err.println(ex.getMessage());
+        }
+      }
     }
+    while ( provider.isRunning());
+    provider.cleanUp();
+    outStream.flush();
+  }
 }


[41/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseDocumentClassifier.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseDocumentClassifier.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseDocumentClassifier.java
index d39c087..cdba150 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseDocumentClassifier.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseDocumentClassifier.java
@@ -19,61 +19,72 @@ under the License.
 
 package org.apache.streams.converter;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.streams.data.DocumentClassifier;
 import org.apache.streams.data.util.ActivityUtil;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
 /**
+ * Ensures generic String and ObjectNode documents can be converted to Activity
+ *
+ * <p/>
  * BaseDocumentClassifier is included by default in all
  * @see org.apache.streams.converter.ActivityConverterProcessor
  *
- * Ensures generic String and ObjectNode documents can be converted to Activity
- *
  */
 public class BaseDocumentClassifier implements DocumentClassifier {
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    @Override
-    @SuppressWarnings("unchecked")
-    public List<Class> detectClasses(Object document) {
-
-        Activity activity;
-        ObjectNode node = null;
-
-        List<Class> classes = new ArrayList<>();
-        // Soon javax.validation will available in jackson
-        //   That will make this simpler and more powerful
-        if( document instanceof String ) {
-            classes.add(String.class);
-            try {
-                activity = this.mapper.readValue((String)document, Activity.class);
-                if(activity != null && ActivityUtil.isValid(activity))
-                    classes.add(Activity.class);
-            } catch (IOException e1) {
-                try {
-                    node = this.mapper.readValue((String)document, ObjectNode.class);
-                    classes.add(ObjectNode.class);
-                } catch (IOException ignored) { }
-            }
-        } else if( document instanceof ObjectNode ){
-            classes.add(ObjectNode.class);
-            activity = this.mapper.convertValue(document, Activity.class);
-            if(ActivityUtil.isValid(activity))
-                classes.add(Activity.class);
-        } else {
-            classes.add(document.getClass());
-        }
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(BaseDocumentClassifier.class);
+
+  @Override
+  @SuppressWarnings("unchecked")
+  public List<Class> detectClasses(Object document) {
 
-        return classes;
+    Activity activity;
+    ObjectNode node = null;
 
+    List<Class> classes = new ArrayList<>();
+    // Soon javax.validation will available in jackson
+    //   That will make this simpler and more powerful
+    if ( document instanceof String ) {
+      classes.add(String.class);
+      try {
+        activity = this.mapper.readValue((String)document, Activity.class);
+        if (activity != null && ActivityUtil.isValid(activity)) {
+          classes.add(Activity.class);
+        }
+      } catch (IOException e1) {
+        try {
+          node = this.mapper.readValue((String)document, ObjectNode.class);
+          classes.add(ObjectNode.class);
+        } catch (IOException ignored) {
+          LOGGER.trace("ignoring ", ignored);
+        }
+      }
+    } else if ( document instanceof ObjectNode ) {
+      classes.add(ObjectNode.class);
+      activity = this.mapper.convertValue(document, Activity.class);
+      if (ActivityUtil.isValid(activity)) {
+        classes.add(Activity.class);
+      }
+    } else {
+      classes.add(document.getClass());
     }
 
+    return classes;
+
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseObjectNodeActivityConverter.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseObjectNodeActivityConverter.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseObjectNodeActivityConverter.java
index b6cde29..cb61f0e 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseObjectNodeActivityConverter.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseObjectNodeActivityConverter.java
@@ -19,75 +19,88 @@ under the License.
 
 package org.apache.streams.converter;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.google.common.collect.Lists;
+
+import org.apache.commons.lang.NotImplementedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.util.List;
 
 /**
+ * Ensures generic ObjectNode representation of an Activity can be converted to Activity
+ *
+ * <p/>
  * BaseObjectNodeActivityConverter is included by default in all
  * @see {@link org.apache.streams.converter.ActivityConverterProcessor}
  *
- * Ensures generic ObjectNode representation of an Activity can be converted to Activity
  *
  */
 public class BaseObjectNodeActivityConverter implements ActivityConverter<ObjectNode> {
 
-    public static Class requiredClass = ObjectNode.class;
+  private static final Logger LOGGER = LoggerFactory.getLogger(BaseObjectNodeActivityConverter.class);
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  public static Class requiredClass = ObjectNode.class;
 
-    @Override
-    public Class requiredClass() {
-        return requiredClass;
-    }
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
+  @Override
+  public Class requiredClass() {
+    return requiredClass;
+  }
 
-    @Override
-    public ObjectNode fromActivity(Activity deserialized) throws ActivityConversionException {
-        try {
-           return mapper.convertValue(deserialized, ObjectNode.class);
-        } catch (Exception e) {
-            throw new ActivityConversionException();
-        }
-    }
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
 
-    @Override
-    public List<Activity> toActivityList(ObjectNode serialized) throws ActivityConversionException {
-        List<Activity> activityList = Lists.newArrayList();
-        try {
-            activityList.add(mapper.convertValue(serialized, Activity.class));
-        } catch (Exception e) {
-            throw new ActivityConversionException();
-        } finally {
-            return activityList;
-        }
+  @Override
+  public ObjectNode fromActivity(Activity deserialized) throws ActivityConversionException {
+    try {
+      return mapper.convertValue(deserialized, ObjectNode.class);
+    } catch (Exception ex) {
+      throw new ActivityConversionException();
     }
+  }
+
+  @Override
+  public List<ObjectNode> fromActivityList(List<Activity> list) {
+    throw new NotImplementedException();
+  }
 
-    @Override
-    public List<ObjectNode> fromActivityList(List<Activity> list) {
-        throw new NotImplementedException();
+  @Override
+  public List<Activity> toActivityList(ObjectNode serialized) throws ActivityConversionException {
+    List<Activity> activityList = Lists.newArrayList();
+    try {
+      activityList.add(mapper.convertValue(serialized, Activity.class));
+    } catch (Exception ex) {
+      throw new ActivityConversionException();
+    } finally {
+      return activityList;
     }
+  }
 
-    @Override
-    public List<Activity> toActivityList(List<ObjectNode> list) {
-        List<Activity> result = Lists.newArrayList();
-        for( ObjectNode item : list ) {
-            try {
-                result.addAll(toActivityList(item));
-            } catch (ActivityConversionException e) {}
-        }
-        return result;
+  @Override
+  public List<Activity> toActivityList(List<ObjectNode> list) {
+    List<Activity> result = Lists.newArrayList();
+    for ( ObjectNode item : list ) {
+      try {
+        result.addAll(toActivityList(item));
+      } catch (ActivityConversionException ex) {
+        LOGGER.trace("ActivityConversionException", ex);
+      }
     }
+    return result;
+  }
+
+
+
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseObjectNodeActivityObjectConverter.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseObjectNodeActivityObjectConverter.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseObjectNodeActivityObjectConverter.java
index cb66414..a38585e 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseObjectNodeActivityObjectConverter.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseObjectNodeActivityObjectConverter.java
@@ -19,58 +19,54 @@ under the License.
 
 package org.apache.streams.converter;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.NotImplementedException;
-import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.data.ActivityObjectConverter;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.jackson.StreamsJacksonMapper;
-import org.apache.streams.pojo.json.Activity;
 import org.apache.streams.pojo.json.ActivityObject;
 
-import java.util.List;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
 
 /**
+ * Ensures generic ObjectNode representation of an Activity can be converted to Activity.
+ *
+ * <p/>
  * BaseObjectNodeActivityConverter is included by default in all
  * @see {@link ActivityConverterProcessor}
  *
- * Ensures generic ObjectNode representation of an Activity can be converted to Activity
- *
  */
 public class BaseObjectNodeActivityObjectConverter implements ActivityObjectConverter<ObjectNode> {
 
-    public static Class requiredClass = ObjectNode.class;
+  public static Class requiredClass = ObjectNode.class;
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Override
-    public Class requiredClass() {
-        return requiredClass;
-    }
+  @Override
+  public Class requiredClass() {
+    return requiredClass;
+  }
 
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
 
-    @Override
-    public ObjectNode fromActivityObject(ActivityObject deserialized) throws ActivityConversionException {
-        try {
-            return mapper.convertValue(deserialized, ObjectNode.class);
-        } catch (Exception e) {
-            throw new ActivityConversionException();
-        }
+  @Override
+  public ObjectNode fromActivityObject(ActivityObject deserialized) throws ActivityConversionException {
+    try {
+      return mapper.convertValue(deserialized, ObjectNode.class);
+    } catch (Exception ex) {
+      throw new ActivityConversionException();
     }
+  }
 
-    @Override
-    public ActivityObject toActivityObject(ObjectNode serialized) throws ActivityConversionException {
-        try {
-            return mapper.convertValue(serialized, ActivityObject.class);
-        } catch (Exception e) {
-            throw new ActivityConversionException();
-        }
+  @Override
+  public ActivityObject toActivityObject(ObjectNode serialized) throws ActivityConversionException {
+    try {
+      return mapper.convertValue(serialized, ActivityObject.class);
+    } catch (Exception ex) {
+      throw new ActivityConversionException();
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseStringActivityConverter.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseStringActivityConverter.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseStringActivityConverter.java
index 7438abb..da15dee 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseStringActivityConverter.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseStringActivityConverter.java
@@ -19,75 +19,86 @@ under the License.
 
 package org.apache.streams.converter;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Lists;
-import org.apache.commons.lang.NotImplementedException;
 import org.apache.streams.data.ActivityConverter;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.Activity;
 
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Lists;
+
+import org.apache.commons.lang.NotImplementedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.util.List;
 
 /**
+ * Ensures a generic String JSON representation of an Activity can be converted to an Activity.
+ *
+ * <p/>
  * BaseObjectNodeActivityConverter is included by default in all
  * @see {@link org.apache.streams.converter.ActivityConverterProcessor}
  *
- * Ensures generic String Json representation of an Activity can be converted to Activity
- *
  */
 public class BaseStringActivityConverter implements ActivityConverter<String> {
 
-    public static Class requiredClass = String.class;
+  private static final Logger LOGGER = LoggerFactory.getLogger(BaseObjectNodeActivityConverter.class);
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  public static final Class requiredClass = String.class;
 
-    @Override
-    public Class requiredClass() {
-        return requiredClass;
-    }
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
+  @Override
+  public Class requiredClass() {
+    return requiredClass;
+  }
 
-    @Override
-    public String fromActivity(Activity deserialized) throws ActivityConversionException {
-        try {
-            return mapper.writeValueAsString(deserialized);
-        } catch (JsonProcessingException e) {
-            throw new ActivityConversionException();
-        }
-    }
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
 
-    @Override
-    public List<Activity> toActivityList(String serialized) throws ActivityConversionException {
-        List<Activity> activityList = Lists.newArrayList();
-        try {
-            activityList.add(mapper.readValue(serialized, Activity.class));
-        } catch (Exception e) {
-            throw new ActivityConversionException();
-        } finally {
-            return activityList;
-        }
+  @Override
+  public String fromActivity(Activity deserialized) throws ActivityConversionException {
+    try {
+      return mapper.writeValueAsString(deserialized);
+    } catch (JsonProcessingException ex) {
+      throw new ActivityConversionException();
     }
+  }
+
+  @Override
+  public List<String> fromActivityList(List<Activity> list) {
+    throw new NotImplementedException();
+  }
 
-    @Override
-    public List<String> fromActivityList(List<Activity> list) {
-        throw new NotImplementedException();
+  @Override
+  public List<Activity> toActivityList(String serialized) throws ActivityConversionException {
+    List<Activity> activityList = Lists.newArrayList();
+    try {
+      activityList.add(mapper.readValue(serialized, Activity.class));
+    } catch (Exception ex) {
+      throw new ActivityConversionException();
+    } finally {
+      return activityList;
     }
+  }
 
-    @Override
-    public List<Activity> toActivityList(List<String> list) {
-        List<Activity> result = Lists.newArrayList();
-        for( String item : list ) {
-            try {
-                result.addAll(toActivityList(item));
-            } catch (ActivityConversionException e) {}
-        }
-        return result;
+  @Override
+  public List<Activity> toActivityList(List<String> list) {
+    List<Activity> result = Lists.newArrayList();
+    for ( String item : list ) {
+      try {
+        result.addAll(toActivityList(item));
+      } catch (ActivityConversionException ex) {
+        LOGGER.trace("ActivityConversionException", ex);
+      }
     }
+    return result;
+  }
+
+
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseStringActivityObjectConverter.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseStringActivityObjectConverter.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseStringActivityObjectConverter.java
index 3bbbdac..7322fc1 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseStringActivityObjectConverter.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/BaseStringActivityObjectConverter.java
@@ -19,52 +19,53 @@ under the License.
 
 package org.apache.streams.converter;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.streams.data.ActivityObjectConverter;
 import org.apache.streams.exceptions.ActivityConversionException;
 import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.apache.streams.pojo.json.ActivityObject;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+
 /**
+ * Ensures a generic String representation of an ActivityObject can be converted to an ActivityObject.
+ *
+ * <p/>
  * BaseObjectNodeActivityConverter is included by default in all
  * @see {@link ActivityConverterProcessor}
  *
- * Ensures generic ObjectNode representation of an Activity can be converted to Activity
- *
  */
 public class BaseStringActivityObjectConverter implements ActivityObjectConverter<String> {
 
-    public static Class requiredClass = String.class;
+  public static Class requiredClass = String.class;
 
-    private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+  private ObjectMapper mapper = StreamsJacksonMapper.getInstance();
 
-    @Override
-    public Class requiredClass() {
-        return requiredClass;
-    }
+  @Override
+  public Class requiredClass() {
+    return requiredClass;
+  }
 
-    @Override
-    public String serializationFormat() {
-        return null;
-    }
+  @Override
+  public String serializationFormat() {
+    return null;
+  }
 
-    @Override
-    public String fromActivityObject(ActivityObject deserialized) throws ActivityConversionException {
-        try {
-            return mapper.writeValueAsString(deserialized);
-        } catch (Exception e) {
-            throw new ActivityConversionException();
-        }
+  @Override
+  public String fromActivityObject(ActivityObject deserialized) throws ActivityConversionException {
+    try {
+      return mapper.writeValueAsString(deserialized);
+    } catch (Exception ex) {
+      throw new ActivityConversionException();
     }
+  }
 
-    @Override
-    public ActivityObject toActivityObject(String serialized) throws ActivityConversionException {
-        try {
-            return mapper.readValue(serialized, ActivityObject.class);
-        } catch (Exception e) {
-            throw new ActivityConversionException();
-        }
+  @Override
+  public ActivityObject toActivityObject(String serialized) throws ActivityConversionException {
+    try {
+      return mapper.readValue(serialized, ActivityObject.class);
+    } catch (Exception ex) {
+      throw new ActivityConversionException();
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/FieldConstants.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/FieldConstants.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/FieldConstants.java
index 26dfcb3..3443bd9 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/FieldConstants.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/FieldConstants.java
@@ -19,14 +19,14 @@
 package org.apache.streams.converter;
 
 /**
- * Predefined field symbols
+ * Predefined field symbols.
  */
 public class FieldConstants {
 
-    protected static final String ID = "ID";
-    protected static final String SEQ = "SEQ";
-    protected static final String TS = "TS";
-    protected static final String META = "META";
-    protected static final String DOC = "DOC";
+  protected static final String ID = "ID";
+  protected static final String SEQ = "SEQ";
+  protected static final String TS = "TS";
+  protected static final String META = "META";
+  protected static final String DOC = "DOC";
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/HoconConverterProcessor.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/HoconConverterProcessor.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/HoconConverterProcessor.java
index b3ee72f..44aa56b 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/HoconConverterProcessor.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/HoconConverterProcessor.java
@@ -19,11 +19,12 @@ under the License.
 
 package org.apache.streams.converter;
 
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.core.util.DatumUtils;
-import org.apache.streams.pojo.json.Activity;
+
+import com.google.common.collect.Lists;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -33,53 +34,62 @@ import java.util.List;
  * HoconConverterProcessor is a utility processor for converting any datum document
  * with translation rules expressed as HOCON in the classpath or at a URL.
  *
+ * <p/>
  * To use this capability without a dedicated stream processor, just use HoconConverterUtil.
  */
 public class HoconConverterProcessor implements StreamsProcessor {
 
-    public static final String STREAMS_ID = "HoconConverterProcessor";
+  public static final String STREAMS_ID = "HoconConverterProcessor";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(HoconConverterProcessor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(HoconConverterProcessor.class);
 
-    protected Class outClass;
-    protected String hocon;
-    protected String inPath;
-    protected String outPath;
+  protected Class outClass;
+  protected String hocon;
+  protected String inPath;
+  protected String outPath;
 
-    public HoconConverterProcessor(Class outClass, String hocon, String inPath, String outPath) {
-        this.outClass = outClass;
-        this.hocon = hocon;
-        this.inPath = inPath;
-        this.outPath = outPath;
-    }
+  /**
+   * HoconConverterProcessor.
+   *
+   * @param outClass outClass
+   * @param hocon hocon
+   * @param inPath inPath
+   * @param outPath outPath
+   */
+  public HoconConverterProcessor(Class outClass, String hocon, String inPath, String outPath) {
+    this.outClass = outClass;
+    this.hocon = hocon;
+    this.inPath = inPath;
+    this.outPath = outPath;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
 
-        List<StreamsDatum> result = Lists.newLinkedList();
-        Object document = entry.getDocument();
+    List<StreamsDatum> result = Lists.newLinkedList();
+    Object document = entry.getDocument();
 
-        Object outDoc = HoconConverterUtil.getInstance().convert(document, outClass, hocon, inPath, outPath);
+    Object outDoc = HoconConverterUtil.getInstance().convert(document, outClass, hocon, inPath, outPath);
 
-        StreamsDatum datum = DatumUtils.cloneDatum(entry);
-        datum.setDocument(outDoc);
-        result.add(datum);
+    StreamsDatum datum = DatumUtils.cloneDatum(entry);
+    datum.setDocument(outDoc);
+    result.add(datum);
 
-        return result;
-    }
+    return result;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public void prepare(Object configurationObject) {
 
-    }
+  }
 
-    @Override
-    public void cleanUp() {
+  @Override
+  public void cleanUp() {
 
-    }
-};
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/HoconConverterUtil.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/HoconConverterUtil.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/HoconConverterUtil.java
index f8db30c..bac081c 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/HoconConverterUtil.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/HoconConverterUtil.java
@@ -18,6 +18,8 @@
 
 package org.apache.streams.converter;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -26,7 +28,7 @@ import com.typesafe.config.ConfigFactory;
 import com.typesafe.config.ConfigObject;
 import com.typesafe.config.ConfigRenderOptions;
 import com.typesafe.config.ConfigValue;
-import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -34,95 +36,104 @@ import java.io.IOException;
 
 /**
  * HoconConverterUtil supports HoconConverterProcessor in converting types via application
- * of hocon (https://github.com/typesafehub/config/blob/master/HOCON.md) scripts
+ * of hocon (https://github.com/typesafehub/config/blob/master/HOCON.md) scripts.
  */
 public class HoconConverterUtil {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(HoconConverterUtil.class);
-
-    private static ObjectMapper mapper = StreamsJacksonMapper.getInstance();
-
-    private static final HoconConverterUtil INSTANCE = new HoconConverterUtil();
-
-    public static HoconConverterUtil getInstance(){
-        return INSTANCE;
-    }
-
-    public Object convert(Object object, Class outClass, String hoconResource) {
-        Config hocon = ConfigFactory.parseResources(hoconResource);
-        return convert(object, outClass, hocon, null);
-    }
-
-    public Object convert(Object object, Class outClass, String hoconResource, String outPath) {
-        Config hocon = ConfigFactory.parseResources(hoconResource);
-        return convert(object, outClass, hocon, outPath);
+  private static final Logger LOGGER = LoggerFactory.getLogger(HoconConverterUtil.class);
+
+  private static final ObjectMapper mapper = StreamsJacksonMapper.getInstance();
+
+  private static final HoconConverterUtil INSTANCE = new HoconConverterUtil();
+
+  public static HoconConverterUtil getInstance() {
+    return INSTANCE;
+  }
+
+  public Object convert(Object object, Class outClass, String hoconResource) {
+    Config hocon = ConfigFactory.parseResources(hoconResource);
+    return convert(object, outClass, hocon, null);
+  }
+
+  public Object convert(Object object, Class outClass, String hoconResource, String outPath) {
+    Config hocon = ConfigFactory.parseResources(hoconResource);
+    return convert(object, outClass, hocon, outPath);
+  }
+
+  public Object convert(Object object, Class outClass, String hoconResource, String inPath, String outPath) {
+    Config hocon = ConfigFactory.parseResources(hoconResource);
+    return convert(object, outClass, hocon, inPath, outPath);
+  }
+
+  public Object convert(Object object, Class outClass, Config hocon, String outPath) {
+    return convert(object, outClass, hocon, null, outPath);
+  }
+
+  /**
+   * convert.
+   * @param object object
+   * @param outClass outClass
+   * @param hocon hocon
+   * @param inPath inPath
+   * @param outPath outPath
+   * @return result
+   */
+  public Object convert(Object object, Class outClass, Config hocon, String inPath, String outPath) {
+    String json = null;
+    Object outDoc = null;
+    if ( object instanceof String ) {
+      json = (String) object;
+    } else {
+      try {
+        json = mapper.writeValueAsString(object);
+      } catch (JsonProcessingException ex) {
+        LOGGER.warn("Failed to process input:", object);
+        return outDoc;
+      }
     }
 
-    public Object convert(Object object, Class outClass, String hoconResource, String inPath, String outPath) {
-        Config hocon = ConfigFactory.parseResources(hoconResource);
-        return convert(object, outClass, hocon, inPath, outPath);
+    Config base;
+    if( inPath == null) {
+      base = ConfigFactory.parseString(json);
+    } else {
+      ObjectNode node;
+      try {
+        node = mapper.readValue(json, ObjectNode.class);
+        ObjectNode root = mapper.createObjectNode();
+        root.set(inPath, node);
+        json = mapper.writeValueAsString(root);
+        base = ConfigFactory.parseString(json);
+      } catch (Exception ex) {
+        LOGGER.warn("Failed to process input:", object);
+        return outDoc;
+      }
     }
 
-    public Object convert(Object object, Class outClass, Config hocon, String outPath) {
-        return convert(object, outClass, hocon, null, outPath);
+    Config converted = hocon.withFallback(base);
+
+    String outJson = null;
+    try {
+      if( outPath == null ) {
+        outJson = converted.resolve().root().render(ConfigRenderOptions.concise());
+      } else {
+        Config resolved = converted.resolve();
+        ConfigObject outObject = resolved.withOnlyPath(outPath).root();
+        ConfigValue outValue = outObject.get(outPath);
+        outJson = outValue.render(ConfigRenderOptions.concise());
+      }
+    } catch (Exception ex) {
+      LOGGER.warn("Failed to convert:", json);
+      LOGGER.warn(ex.getMessage());
     }
-
-    public Object convert(Object object, Class outClass, Config hocon, String inPath, String outPath) {
-        String json = null;
-        Object outDoc = null;
-        if( object instanceof String ) {
-            json = (String) object;
-        } else {
-            try {
-                json = mapper.writeValueAsString(object);
-            } catch (JsonProcessingException e) {
-                LOGGER.warn("Failed to process input:", object);
-                return outDoc;
-            }
-        }
-
-        Config base;
-        if( inPath == null)
-            base = ConfigFactory.parseString(json);
-        else {
-            ObjectNode node;
-            try {
-                node = mapper.readValue(json, ObjectNode.class);
-                ObjectNode root = mapper.createObjectNode();
-                root.set(inPath, node);
-                json = mapper.writeValueAsString(root);
-                base = ConfigFactory.parseString(json);
-            } catch (Exception e) {
-                LOGGER.warn("Failed to process input:", object);
-                return outDoc;
-            }
-        }
-
-        Config converted = hocon.withFallback(base);
-
-        String outJson = null;
-        try {
-            if( outPath == null )
-                outJson = converted.resolve().root().render(ConfigRenderOptions.concise());
-            else {
-                Config resolved = converted.resolve();
-                ConfigObject outObject = resolved.withOnlyPath(outPath).root();
-                ConfigValue outValue = outObject.get(outPath);
-                outJson = outValue.render(ConfigRenderOptions.concise());
-            }
-        } catch (Exception e) {
-            LOGGER.warn("Failed to convert:", json);
-            LOGGER.warn(e.getMessage());
-        }
-        if( outClass == String.class )
-            return outJson;
-        else {
-            try {
-                outDoc = mapper.readValue( outJson, outClass );
-            } catch (IOException e) {
-                LOGGER.warn("Failed to convert:", object);
-            }
-        }
-        return outDoc;
+    if ( outClass == String.class )
+      return outJson;
+    else {
+      try {
+        outDoc = mapper.readValue( outJson, outClass );
+      } catch (IOException ex) {
+        LOGGER.warn("Failed to convert:", object);
+      }
     }
+    return outDoc;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/LineReadWriteUtil.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/LineReadWriteUtil.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/LineReadWriteUtil.java
index a38568b..d245c3e 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/LineReadWriteUtil.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/LineReadWriteUtil.java
@@ -42,185 +42,219 @@ import java.util.Map;
  */
 public class LineReadWriteUtil {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(TypeConverterUtil.class);
-
-    private static Map<LineReadWriteConfiguration, LineReadWriteUtil> INSTANCE_MAP = Maps.newConcurrentMap();
-
-    private final static List<String> DEFAULT_FIELDS = Lists.newArrayList("ID", "SEQ", "TS", "META", "DOC");
+  private static final Logger LOGGER = LoggerFactory.getLogger(TypeConverterUtil.class);
+
+  private static Map<LineReadWriteConfiguration, LineReadWriteUtil> INSTANCE_MAP = Maps.newConcurrentMap();
+
+  private static final List<String> DEFAULT_FIELDS = Lists.newArrayList("ID", "SEQ", "TS", "META", "DOC");
+
+  private List<String> fields;
+  private String fieldDelimiter = "\t";
+  private String lineDelimiter = "\n";
+  private String encoding = "UTF-8";
+
+  private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+
+  private LineReadWriteUtil() {
+  }
+
+  private LineReadWriteUtil(LineReadWriteConfiguration configuration) {
+    this.fields = configuration.getFields();
+    this.fieldDelimiter = configuration.getFieldDelimiter();
+    this.lineDelimiter = configuration.getLineDelimiter();
+    this.encoding = configuration.getEncoding();
+  }
+
+  public static LineReadWriteUtil getInstance() {
+    return getInstance(new LineReadWriteConfiguration());
+  }
+
+  /**
+   * getInstance.
+   * @param configuration
+   * @return result
+   */
+  public static LineReadWriteUtil getInstance(LineReadWriteConfiguration configuration) {
+    if ( INSTANCE_MAP.containsKey(configuration)
+        &&
+        INSTANCE_MAP.get(configuration) != null) {
+      return INSTANCE_MAP.get(configuration);
+    } else {
+      INSTANCE_MAP.put(configuration, new LineReadWriteUtil(configuration));
+      return INSTANCE_MAP.get(configuration);
+    }
+  }
+
+  /**
+   * Parse a delimited line into its configured fields and build a StreamsDatum from them.
+   * @param line
+   * @return result
+   */
+  public StreamsDatum processLine(String line) {
+
+    List<String> expectedFields = fields;
+    if ( line.endsWith(lineDelimiter)) {
+      line = trimLineDelimiter(line);
+    }
+    String[] parsedFields = line.split(fieldDelimiter);
 
-    private List<String> fields;
-    private String fieldDelimiter = "\t";
-    private String lineDelimiter = "\n";
-    private String encoding = "UTF-8";
+    if ( parsedFields.length == 0) {
+      return null;
+    }
 
-    private static ObjectMapper MAPPER = StreamsJacksonMapper.getInstance();
+    String id = null;
+    DateTime ts = null;
+    BigInteger seq = null;
+    Map<String, Object> metadata = null;
+    String json = null;
 
-    private LineReadWriteUtil() {
+    if ( expectedFields.contains( FieldConstants.DOC )
+        && parsedFields.length > expectedFields.indexOf(FieldConstants.DOC)) {
+      json = parsedFields[expectedFields.indexOf(FieldConstants.DOC)];
     }
 
-    private LineReadWriteUtil(LineReadWriteConfiguration configuration) {
-        this.fields = configuration.getFields();
-        this.fieldDelimiter = configuration.getFieldDelimiter();
-        this.lineDelimiter = configuration.getLineDelimiter();
-        this.encoding = configuration.getEncoding();
+    if ( expectedFields.contains( FieldConstants.ID )
+        && parsedFields.length > expectedFields.indexOf(FieldConstants.ID)) {
+      id = parsedFields[expectedFields.indexOf(FieldConstants.ID)];
     }
-
-    public static LineReadWriteUtil getInstance() {
-        return getInstance(new LineReadWriteConfiguration());
+    if ( expectedFields.contains( FieldConstants.SEQ )
+        && parsedFields.length > expectedFields.indexOf(FieldConstants.SEQ)) {
+      try {
+        seq = new BigInteger(parsedFields[expectedFields.indexOf(FieldConstants.SEQ)]);
+      } catch ( NumberFormatException nfe ) {
+        LOGGER.warn("invalid sequence number {}", nfe);
+      }
     }
-
-    public static LineReadWriteUtil getInstance(LineReadWriteConfiguration configuration) {
-        if( INSTANCE_MAP.containsKey(configuration) &&
-            INSTANCE_MAP.get(configuration) != null)
-            return INSTANCE_MAP.get(configuration);
-        else {
-            INSTANCE_MAP.put(configuration, new LineReadWriteUtil(configuration));
-            return INSTANCE_MAP.get(configuration);
-        }
+    if ( expectedFields.contains( FieldConstants.TS )
+        && parsedFields.length > expectedFields.indexOf(FieldConstants.TS)) {
+      ts = parseTs(parsedFields[expectedFields.indexOf(FieldConstants.TS)]);
     }
-
-    public StreamsDatum processLine(String line) {
-
-        List<String> expectedFields = fields;
-        if( line.endsWith(lineDelimiter)) line = trimLineDelimiter(line);
-        String[] parsedFields = line.split(fieldDelimiter);
-
-        if( parsedFields.length == 0)
-            return null;
-
-        String id = null;
-        DateTime ts = null;
-        BigInteger seq = null;
-        Map<String, Object> metadata = null;
-        String json = null;
-
-        if( expectedFields.contains( FieldConstants.DOC )
-                && parsedFields.length > expectedFields.indexOf(FieldConstants.DOC)) {
-            json = parsedFields[expectedFields.indexOf(FieldConstants.DOC)];
-        }
-
-        if( expectedFields.contains( FieldConstants.ID )
-                && parsedFields.length > expectedFields.indexOf(FieldConstants.ID)) {
-            id = parsedFields[expectedFields.indexOf(FieldConstants.ID)];
-        }
-        if( expectedFields.contains( FieldConstants.SEQ )
-                && parsedFields.length > expectedFields.indexOf(FieldConstants.SEQ)) {
-            try {
-                seq = new BigInteger(parsedFields[expectedFields.indexOf(FieldConstants.SEQ)]);
-            } catch( NumberFormatException nfe )
-            { LOGGER.warn("invalid sequence number {}", nfe); }
-        }
-        if( expectedFields.contains( FieldConstants.TS )
-                && parsedFields.length > expectedFields.indexOf(FieldConstants.TS)) {
-            ts = parseTs(parsedFields[expectedFields.indexOf(FieldConstants.TS)]);
-        }
-        if( expectedFields.contains( FieldConstants.META )
-                && parsedFields.length > expectedFields.indexOf(FieldConstants.META)) {
-            metadata = parseMap(parsedFields[expectedFields.indexOf(FieldConstants.META)]);
-        }
-
-        StreamsDatum datum = new StreamsDatum(json);
-        datum.setId(id);
-        datum.setTimestamp(ts);
-        datum.setMetadata(metadata);
-        datum.setSequenceid(seq);
-        return datum;
-
+    if ( expectedFields.contains( FieldConstants.META )
+        && parsedFields.length > expectedFields.indexOf(FieldConstants.META)) {
+      metadata = parseMap(parsedFields[expectedFields.indexOf(FieldConstants.META)]);
     }
 
-    public String convertResultToString(StreamsDatum entry) {
-        String metadataJson = null;
-        try {
-            metadataJson = MAPPER.writeValueAsString(entry.getMetadata());
-        } catch (JsonProcessingException e) {
-            LOGGER.warn("Error converting metadata to a string", e);
-        }
+    StreamsDatum datum = new StreamsDatum(json);
+    datum.setId(id);
+    datum.setTimestamp(ts);
+    datum.setMetadata(metadata);
+    datum.setSequenceid(seq);
+    return datum;
+
+  }
+
+  /**
+   * convertResultToString
+   * @param entry
+   * @return result
+   */
+  public String convertResultToString(StreamsDatum entry) {
+    String metadataJson = null;
+    try {
+      metadataJson = MAPPER.writeValueAsString(entry.getMetadata());
+    } catch (JsonProcessingException ex) {
+      LOGGER.warn("Error converting metadata to a string", ex);
+    }
 
-        String documentJson = null;
-        try {
-            if( entry.getDocument() instanceof String )
-                documentJson = (String)entry.getDocument();
-            else
-                documentJson = MAPPER.writeValueAsString(entry.getDocument());
-        } catch (JsonProcessingException e) {
-            LOGGER.warn("Error converting document to string", e);
-        }
+    String documentJson = null;
+    try {
+      if ( entry.getDocument() instanceof String ) {
+        documentJson = (String) entry.getDocument();
+      } else {
+        documentJson = MAPPER.writeValueAsString(entry.getDocument());
+      }
+    } catch (JsonProcessingException ex) {
+      LOGGER.warn("Error converting document to string", ex);
+    }
 
-        if (Strings.isNullOrEmpty(documentJson))
-            return null;
-        else {
-            StringBuilder stringBuilder = new StringBuilder();
-            Iterator<String> fields = this.fields.iterator();
-            List<String> fielddata = Lists.newArrayList();
-            Joiner joiner = Joiner.on(fieldDelimiter).useForNull("");
-            while( fields.hasNext() ) {
-                String field = fields.next();
-                if( field.equals(FieldConstants.DOC) )
-                    fielddata.add(documentJson);
-                else if( field.equals(FieldConstants.ID) )
-                    fielddata.add(entry.getId());
-                else if( field.equals(FieldConstants.SEQ) )
-                    if( entry.getSequenceid() != null)
-                        fielddata.add(entry.getSequenceid().toString());
-                    else
-                        fielddata.add("null");
-                else if( field.equals(FieldConstants.TS) )
-                    if( entry.getTimestamp() != null )
-                        fielddata.add(entry.getTimestamp().toString());
-                    else
-                        fielddata.add(DateTime.now().toString());
-                else if( field.equals(FieldConstants.META) )
-                    fielddata.add(metadataJson);
-                else if( entry.getMetadata().containsKey(field)) {
-                    fielddata.add(entry.getMetadata().get(field).toString());
-                } else {
-                    fielddata.add(null);
-                }
-
-            }
-            joiner.appendTo(stringBuilder, fielddata);
-            return stringBuilder.toString();
+    if (Strings.isNullOrEmpty(documentJson)) {
+      return null;
+    } else {
+      StringBuilder stringBuilder = new StringBuilder();
+      Iterator<String> fields = this.fields.iterator();
+      List<String> fielddata = Lists.newArrayList();
+      Joiner joiner = Joiner.on(fieldDelimiter).useForNull("");
+      while( fields.hasNext() ) {
+        String field = fields.next();
+        if ( field.equals(FieldConstants.DOC) ) {
+          fielddata.add(documentJson);
+        } else if ( field.equals(FieldConstants.ID) ) {
+          fielddata.add(entry.getId());
+        } else if ( field.equals(FieldConstants.SEQ) ) {
+          if (entry.getSequenceid() != null) {
+            fielddata.add(entry.getSequenceid().toString());
+          } else {
+            fielddata.add("null");
+          }
+        } else if ( field.equals(FieldConstants.TS) ) {
+          if (entry.getTimestamp() != null) {
+            fielddata.add(entry.getTimestamp().toString());
+          } else {
+            fielddata.add(DateTime.now().toString());
+          }
+        } else if ( field.equals(FieldConstants.META) ) {
+          fielddata.add(metadataJson);
+        } else if ( entry.getMetadata().containsKey(field)) {
+          fielddata.add(entry.getMetadata().get(field).toString());
+        } else {
+          fielddata.add(null);
         }
+      }
+      joiner.appendTo(stringBuilder, fielddata);
+      return stringBuilder.toString();
     }
-
-    public DateTime parseTs(String field) {
-
-        DateTime timestamp = null;
+  }
+
+  /**
+   * parseTs
+   * @param field
+   * @return
+   */
+  public DateTime parseTs(String field) {
+
+    DateTime timestamp = null;
+    try {
+      long longts = Long.parseLong(field);
+      timestamp = new DateTime(longts);
+    } catch ( Exception e1 ) {
+      try {
+        timestamp = DateTime.parse(field);
+      } catch ( Exception e2 ) {
         try {
-            long longts = Long.parseLong(field);
-            timestamp = new DateTime(longts);
-        } catch ( Exception e ) {
-            try {
-                timestamp = DateTime.parse(field);
-            } catch ( Exception e2 ) {
-                try {
-                    timestamp = MAPPER.readValue(field, DateTime.class);
-                } catch ( Exception e3 ) {
-                    LOGGER.warn("Could not parse timestamp:{} ", field);
-                }
-            }
+          timestamp = MAPPER.readValue(field, DateTime.class);
+        } catch ( Exception e3 ) {
+          LOGGER.warn("Could not parse timestamp:{} ", field);
         }
-
-        return timestamp;
+      }
     }
 
-    public Map<String, Object> parseMap(String field) {
+    return timestamp;
+  }
 
-        Map<String, Object> metadata = null;
+  /**
+   * parseMap
+   * @param field
+   * @return result
+   */
+  public Map<String, Object> parseMap(String field) {
 
-        try {
-            JsonNode jsonNode = MAPPER.readValue(field, JsonNode.class);
-            metadata = MAPPER.convertValue(jsonNode, Map.class);
-        } catch (Exception e) {
-            LOGGER.warn("failed in parseMap: " + e.getMessage());
-        }
-        return metadata;
-    }
+    Map<String, Object> metadata = null;
 
-    private String trimLineDelimiter(String str) {
-        if( !Strings.isNullOrEmpty(str))
-            if( str.endsWith(lineDelimiter))
-                return str.substring(0,str.length()-1);
-        return str;
+    try {
+      JsonNode jsonNode = MAPPER.readValue(field, JsonNode.class);
+      metadata = MAPPER.convertValue(jsonNode, Map.class);
+    } catch (Exception ex) {
+      LOGGER.warn("failed in parseMap: " + ex.getMessage());
+    }
+    return metadata;
+  }
+
+  private String trimLineDelimiter(String str) {
+    if ( !Strings.isNullOrEmpty(str)) {
+      if (str.endsWith(lineDelimiter)) {
+        return str.substring(0, str.length() - 1);
+      }
     }
+    return str;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/TypeConverterProcessor.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/TypeConverterProcessor.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/TypeConverterProcessor.java
index edd70f4..a269f4d 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/TypeConverterProcessor.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/TypeConverterProcessor.java
@@ -16,13 +16,16 @@ KIND, either express or implied.  See the License for the
 specific language governing permissions and limitations
 under the License.
 */
+
 package org.apache.streams.converter;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.jackson.StreamsJacksonMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.Lists;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -32,65 +35,68 @@ import java.util.List;
 /**
  * TypeConverterProcessor converts between String json and jackson-compatible POJO objects.
  *
+ * <p/>
  * Activity is one supported jackson-compatible POJO, so JSON String and objects with structual similarities
  *   to Activity can be converted to Activity objects.
  *
+ * <p/>
  * However, conversion to Activity should probably use {@link org.apache.streams.converter.ActivityConverterProcessor}
  *
  */
 public class TypeConverterProcessor implements StreamsProcessor, Serializable {
 
-    public static final String STREAMS_ID = "TypeConverterProcessor";
+  public static final String STREAMS_ID = "TypeConverterProcessor";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(TypeConverterProcessor.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TypeConverterProcessor.class);
 
-    private List<String> formats = Lists.newArrayList();
+  private List<String> formats = Lists.newArrayList();
 
-    protected ObjectMapper mapper;
+  protected ObjectMapper mapper;
 
-    protected Class outClass;
+  protected Class outClass;
 
-    public TypeConverterProcessor(Class outClass) {
-        this.outClass = outClass;
-    }
-
-    public TypeConverterProcessor(Class outClass, List<String> formats) {
-        this(outClass);
-        this.formats = formats;
-    }
+  public TypeConverterProcessor(Class outClass) {
+    this.outClass = outClass;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public TypeConverterProcessor(Class outClass, List<String> formats) {
+    this(outClass);
+    this.formats = formats;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        List<StreamsDatum> result = Lists.newLinkedList();
-        Object inDoc = entry.getDocument();
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
 
-        Object outDoc = TypeConverterUtil.getInstance().convert(inDoc, outClass, mapper);
+    List<StreamsDatum> result = Lists.newLinkedList();
+    Object inDoc = entry.getDocument();
 
-        if( outDoc != null ) {
-            entry.setDocument(outDoc);
-            result.add(entry);
-        }
+    Object outDoc = TypeConverterUtil.getInstance().convert(inDoc, outClass, mapper);
 
-        return result;
+    if ( outDoc != null ) {
+      entry.setDocument(outDoc);
+      result.add(entry);
     }
 
-    @Override
-    public void prepare(Object configurationObject) {
-        if( formats.size() > 0 )
-            this.mapper = StreamsJacksonMapper.getInstance(formats);
-        else
-            this.mapper = StreamsJacksonMapper.getInstance();
-    }
+    return result;
+  }
 
-    @Override
-    public void cleanUp() {
-        this.mapper = null;
+  @Override
+  public void prepare(Object configurationObject) {
+    if ( formats.size() > 0 ) {
+      this.mapper = StreamsJacksonMapper.getInstance(formats);
+    } else {
+      this.mapper = StreamsJacksonMapper.getInstance();
     }
+  }
+
+  @Override
+  public void cleanUp() {
+    this.mapper = null;
+  }
 
-};
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-converters/src/main/java/org/apache/streams/converter/TypeConverterUtil.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/TypeConverterUtil.java b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/TypeConverterUtil.java
index 4ace9c4..8843d0e 100644
--- a/streams-components/streams-converters/src/main/java/org/apache/streams/converter/TypeConverterUtil.java
+++ b/streams-components/streams-converters/src/main/java/org/apache/streams/converter/TypeConverterUtil.java
@@ -18,9 +18,11 @@
 
 package org.apache.streams.converter;
 
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
-import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -28,49 +30,56 @@ import java.io.IOException;
 
 /**
  * TypeConverterUtil supports TypeConverterProcessor in converting between String json and
- * jackson-compatible POJO objects
+ * jackson-compatible POJO objects.
  */
 public class TypeConverterUtil {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(TypeConverterUtil.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(TypeConverterUtil.class);
 
-    private static final TypeConverterUtil INSTANCE = new TypeConverterUtil();
+  private static final TypeConverterUtil INSTANCE = new TypeConverterUtil();
 
-    public static TypeConverterUtil getInstance(){
-        return INSTANCE;
-    }
+  public static TypeConverterUtil getInstance() {
+    return INSTANCE;
+  }
+
+  public Object convert(Object object, Class outClass) {
+    return TypeConverterUtil.getInstance().convert(object, outClass, StreamsJacksonMapper.getInstance());
+  }
 
-    public Object convert(Object object, Class outClass) {
-        return TypeConverterUtil.getInstance().convert(object, outClass, StreamsJacksonMapper.getInstance());
+  /**
+   * convert
+   * @param object
+   * @param outClass
+   * @param mapper
+   * @return
+   */
+  public Object convert(Object object, Class outClass, ObjectMapper mapper) {
+    ObjectNode node = null;
+    Object outDoc = null;
+    if ( object instanceof String ) {
+      try {
+        node = mapper.readValue((String)object, ObjectNode.class);
+      } catch (IOException ex) {
+        LOGGER.warn(ex.getMessage());
+        LOGGER.warn(object.toString());
+      }
+    } else {
+      node = mapper.convertValue(object, ObjectNode.class);
     }
 
-    public Object convert(Object object, Class outClass, ObjectMapper mapper) {
-        ObjectNode node = null;
-        Object outDoc = null;
-        if( object instanceof String ) {
-            try {
-                node = mapper.readValue((String)object, ObjectNode.class);
-            } catch (IOException e) {
-               LOGGER.warn(e.getMessage());
-                LOGGER.warn(object.toString());
-            }
+    if(node != null) {
+      try {
+        if ( outClass == String.class ) {
+          outDoc = mapper.writeValueAsString(node);
         } else {
-            node = mapper.convertValue(object, ObjectNode.class);
+          outDoc = mapper.convertValue(node, outClass);
         }
-
-        if(node != null) {
-            try {
-                if( outClass == String.class )
-                    outDoc = mapper.writeValueAsString(node);
-                else
-                    outDoc = mapper.convertValue(node, outClass);
-
-            } catch (Throwable e) {
-                LOGGER.warn(e.getMessage());
-                LOGGER.warn(node.toString());
-            }
-        }
-
-        return outDoc;
+      } catch (Throwable ex) {
+        LOGGER.warn(ex.getMessage());
+        LOGGER.warn(node.toString());
+      }
     }
+
+    return outDoc;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-filters/src/main/java/org/apache/streams/filters/VerbDefinitionDropFilter.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-filters/src/main/java/org/apache/streams/filters/VerbDefinitionDropFilter.java b/streams-components/streams-filters/src/main/java/org/apache/streams/filters/VerbDefinitionDropFilter.java
index e0c7a68..ed40a17 100644
--- a/streams-components/streams-filters/src/main/java/org/apache/streams/filters/VerbDefinitionDropFilter.java
+++ b/streams-components/streams-filters/src/main/java/org/apache/streams/filters/VerbDefinitionDropFilter.java
@@ -18,15 +18,16 @@
 
 package org.apache.streams.filters;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.verbs.ObjectCombination;
 import org.apache.streams.verbs.VerbDefinition;
 import org.apache.streams.verbs.VerbDefinitionMatchUtil;
 import org.apache.streams.verbs.VerbDefinitionResolver;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -39,57 +40,60 @@ import java.util.Set;
  */
 public class VerbDefinitionDropFilter implements StreamsProcessor {
 
-    public static final String STREAMS_ID = "VerbDefinitionDropFilter";
+  public static final String STREAMS_ID = "VerbDefinitionDropFilter";
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(VerbDefinitionDropFilter.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(VerbDefinitionDropFilter.class);
 
-    protected Set<VerbDefinition> verbDefinitionSet;
-    protected VerbDefinitionResolver resolver;
+  protected Set<VerbDefinition> verbDefinitionSet;
+  protected VerbDefinitionResolver resolver;
 
-    public VerbDefinitionDropFilter() {
-        // get with reflection
-    }
-
-    public VerbDefinitionDropFilter(Set<VerbDefinition> verbDefinitionSet) {
-        this();
-        this.verbDefinitionSet = verbDefinitionSet;
-    }
+  public VerbDefinitionDropFilter() {
+    // get with reflection
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public VerbDefinitionDropFilter(Set<VerbDefinition> verbDefinitionSet) {
+    this();
+    this.verbDefinitionSet = verbDefinitionSet;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        List<StreamsDatum> result = Lists.newArrayList();
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
 
-        LOGGER.debug("{} filtering {}", STREAMS_ID, entry.getDocument().getClass());
+    List<StreamsDatum> result = Lists.newArrayList();
 
-        Activity activity;
+    LOGGER.debug("{} filtering {}", STREAMS_ID, entry.getDocument().getClass());
 
-        Preconditions.checkArgument(entry.getDocument() instanceof Activity);
+    Activity activity;
 
-        activity = (Activity) entry.getDocument();
+    Preconditions.checkArgument(entry.getDocument() instanceof Activity);
 
-        if( VerbDefinitionMatchUtil.match(activity, this.verbDefinitionSet) == false )
-            result.add(entry);
+    activity = (Activity) entry.getDocument();
 
-        return result;
+    if ( VerbDefinitionMatchUtil.match(activity, this.verbDefinitionSet) == false ) {
+      result.add(entry);
     }
 
-    @Override
-    public void prepare(Object o) {
-        if( verbDefinitionSet != null)
-            resolver = new VerbDefinitionResolver(verbDefinitionSet);
-        else resolver = new VerbDefinitionResolver();
-        Preconditions.checkNotNull(resolver);
-    }
+    return result;
+  }
 
-    @Override
-    public void cleanUp() {
-        // noOp
+  @Override
+  public void prepare(Object configuration) {
+    if ( verbDefinitionSet != null) {
+      resolver = new VerbDefinitionResolver(verbDefinitionSet);
+    } else {
+      resolver = new VerbDefinitionResolver();
     }
+    Preconditions.checkNotNull(resolver);
+  }
+
+  @Override
+  public void cleanUp() {
+    // noOp
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-filters/src/main/java/org/apache/streams/filters/VerbDefinitionKeepFilter.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-filters/src/main/java/org/apache/streams/filters/VerbDefinitionKeepFilter.java b/streams-components/streams-filters/src/main/java/org/apache/streams/filters/VerbDefinitionKeepFilter.java
index 82e8c99..7562905 100644
--- a/streams-components/streams-filters/src/main/java/org/apache/streams/filters/VerbDefinitionKeepFilter.java
+++ b/streams-components/streams-filters/src/main/java/org/apache/streams/filters/VerbDefinitionKeepFilter.java
@@ -18,19 +18,21 @@
 
 package org.apache.streams.filters;
 
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsProcessor;
 import org.apache.streams.pojo.json.Activity;
-import org.apache.streams.verbs.ObjectCombination;
 import org.apache.streams.verbs.VerbDefinition;
 import org.apache.streams.verbs.VerbDefinitionMatchUtil;
 import org.apache.streams.verbs.VerbDefinitionResolver;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.List;
+import java.util.Set;
 
 /**
  * Checks one or more verb definitions against a stream of Activity documents, and drops any activities
@@ -38,57 +40,60 @@ import java.util.*;
  */
 public class VerbDefinitionKeepFilter implements StreamsProcessor {
 
-    public static final String STREAMS_ID = "VerbDefinitionKeepFilter";
+  public static final String STREAMS_ID = "VerbDefinitionKeepFilter";
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(VerbDefinitionKeepFilter.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(VerbDefinitionKeepFilter.class);
 
-    protected Set<VerbDefinition> verbDefinitionSet;
-    protected VerbDefinitionResolver resolver;
+  protected Set<VerbDefinition> verbDefinitionSet;
+  protected VerbDefinitionResolver resolver;
 
-    public VerbDefinitionKeepFilter() {
-        // get with reflection
-    }
-
-    public VerbDefinitionKeepFilter(Set<VerbDefinition> verbDefinitionSet) {
-        this();
-        this.verbDefinitionSet = verbDefinitionSet;
-    }
+  public VerbDefinitionKeepFilter() {
+    // get with reflection
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  public VerbDefinitionKeepFilter(Set<VerbDefinition> verbDefinitionSet) {
+    this();
+    this.verbDefinitionSet = verbDefinitionSet;
+  }
 
-    @Override
-    public List<StreamsDatum> process(StreamsDatum entry) {
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-        List<StreamsDatum> result = Lists.newArrayList();
+  @Override
+  public List<StreamsDatum> process(StreamsDatum entry) {
 
-        LOGGER.debug("{} filtering {}", STREAMS_ID, entry.getDocument().getClass());
+    List<StreamsDatum> result = Lists.newArrayList();
 
-        Activity activity;
+    LOGGER.debug("{} filtering {}", STREAMS_ID, entry.getDocument().getClass());
 
-        Preconditions.checkArgument(entry.getDocument() instanceof Activity);
+    Activity activity;
 
-        activity = (Activity) entry.getDocument();
+    Preconditions.checkArgument(entry.getDocument() instanceof Activity);
 
-        if( VerbDefinitionMatchUtil.match(activity, this.verbDefinitionSet) == true )
-            result.add(entry);
+    activity = (Activity) entry.getDocument();
 
-        return result;
+    if ( VerbDefinitionMatchUtil.match(activity, this.verbDefinitionSet) == true ) {
+      result.add(entry);
     }
 
-    @Override
-    public void prepare(Object o) {
-        if( verbDefinitionSet != null)
-            resolver = new VerbDefinitionResolver(verbDefinitionSet);
-        else resolver = new VerbDefinitionResolver();
-        Preconditions.checkNotNull(resolver);
-    }
+    return result;
+  }
 
-    @Override
-    public void cleanUp() {
-        // noOp
+  @Override
+  public void prepare(Object configuration) {
+    if ( verbDefinitionSet != null ) {
+      resolver = new VerbDefinitionResolver(verbDefinitionSet);
+    } else {
+      resolver = new VerbDefinitionResolver();
     }
+    Preconditions.checkNotNull(resolver);
+  }
+
+  @Override
+  public void cleanUp() {
+    // noOp
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-components/streams-http/src/main/java/org/apache/streams/components/http/persist/SimpleHTTPPostPersistWriter.java
----------------------------------------------------------------------
diff --git a/streams-components/streams-http/src/main/java/org/apache/streams/components/http/persist/SimpleHTTPPostPersistWriter.java b/streams-components/streams-http/src/main/java/org/apache/streams/components/http/persist/SimpleHTTPPostPersistWriter.java
index d8309d9..8cacf1f 100644
--- a/streams-components/streams-http/src/main/java/org/apache/streams/components/http/persist/SimpleHTTPPostPersistWriter.java
+++ b/streams-components/streams-http/src/main/java/org/apache/streams/components/http/persist/SimpleHTTPPostPersistWriter.java
@@ -18,11 +18,19 @@
 
 package org.apache.streams.components.http.persist;
 
+import org.apache.streams.components.http.HttpPersistWriterConfiguration;
+import org.apache.streams.config.ComponentConfigurator;
+import org.apache.streams.config.StreamsConfigurator;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsPersistWriter;
+import org.apache.streams.jackson.StreamsJacksonMapper;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
+
 import org.apache.commons.codec.binary.Base64;
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpStatus;
@@ -33,12 +41,6 @@ import org.apache.http.entity.StringEntity;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.http.util.EntityUtils;
-import org.apache.streams.components.http.HttpPersistWriterConfiguration;
-import org.apache.streams.config.ComponentConfigurator;
-import org.apache.streams.config.StreamsConfigurator;
-import org.apache.streams.core.StreamsDatum;
-import org.apache.streams.core.StreamsPersistWriter;
-import org.apache.streams.jackson.StreamsJacksonMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -51,183 +53,189 @@ import java.util.Map;
 
 public class SimpleHTTPPostPersistWriter implements StreamsPersistWriter {
 
-    private final static String STREAMS_ID = "SimpleHTTPPostPersistWriter";
+  private static final String STREAMS_ID = "SimpleHTTPPostPersistWriter";
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(SimpleHTTPPostPersistWriter.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleHTTPPostPersistWriter.class);
 
-    protected ObjectMapper mapper;
+  protected ObjectMapper mapper;
 
-    protected URIBuilder uriBuilder;
+  protected URIBuilder uriBuilder;
 
-    protected CloseableHttpClient httpclient;
+  protected CloseableHttpClient httpclient;
 
-    protected HttpPersistWriterConfiguration configuration;
+  protected HttpPersistWriterConfiguration configuration;
 
-    protected String authHeader;
+  protected String authHeader;
 
-    public SimpleHTTPPostPersistWriter() {
-        this(new ComponentConfigurator<>(HttpPersistWriterConfiguration.class)
-          .detectConfiguration(StreamsConfigurator.getConfig().getConfig("http")));
-    }
+  public SimpleHTTPPostPersistWriter() {
+    this(new ComponentConfigurator<>(HttpPersistWriterConfiguration.class)
+        .detectConfiguration(StreamsConfigurator.getConfig().getConfig("http")));
+  }
 
-    public SimpleHTTPPostPersistWriter(HttpPersistWriterConfiguration configuration) {
-        this.configuration = configuration;
-    }
+  public SimpleHTTPPostPersistWriter(HttpPersistWriterConfiguration configuration) {
+    this.configuration = configuration;
+  }
 
-    @Override
-    public String getId() {
-        return STREAMS_ID;
-    }
+  @Override
+  public String getId() {
+    return STREAMS_ID;
+  }
 
-    @Override
-    public void write(StreamsDatum entry) {
+  @Override
+  public void write(StreamsDatum entry) {
 
-        ObjectNode payload;
-        try {
-            payload = preparePayload(entry);
-        } catch( Exception e ) {
-            LOGGER.warn("Exception preparing payload, using empty payload");
-            payload = mapper.createObjectNode();
-        }
+    ObjectNode payload;
+    try {
+      payload = preparePayload(entry);
+    } catch ( Exception ex ) {
+      LOGGER.warn("Exception preparing payload, using empty payload");
+      payload = mapper.createObjectNode();
+    }
 
 
-        Map<String, String> params = prepareParams(entry);
+    Map<String, String> params = prepareParams(entry);
 
-        URI uri = prepareURI(params);
+    URI uri = prepareURI(params);
 
-        HttpPost httppost = prepareHttpPost(uri, payload);
+    HttpPost httppost = prepareHttpPost(uri, payload);
 
-        ObjectNode result = executePost(httppost);
+    ObjectNode result = executePost(httppost);
 
-        try {
-            LOGGER.debug(mapper.writeValueAsString(result));
-        } catch (JsonProcessingException e) {
-            LOGGER.warn("Non-json response", e.getMessage());
-        }
+    try {
+      LOGGER.debug(mapper.writeValueAsString(result));
+    } catch (JsonProcessingException ex) {
+      LOGGER.warn("Non-json response", ex.getMessage());
     }
-
-    /**
-     Override this to alter request URI
-     */
-    protected URI prepareURI(Map<String, String> params) {
-        URI uri = null;
-        for( Map.Entry<String,String> param : params.entrySet()) {
-            uriBuilder = uriBuilder.setParameter(param.getKey(), param.getValue());
-        }
-        try {
-            uri = uriBuilder.build();
-        } catch (URISyntaxException e) {
-            LOGGER.error("URI error {}", uriBuilder.toString());
-        }
-        return uri;
+  }
+
+  /**
+   Override this to alter request URI.
+   */
+  protected URI prepareURI(Map<String, String> params) {
+    URI uri = null;
+    for ( Map.Entry<String,String> param : params.entrySet()) {
+      uriBuilder = uriBuilder.setParameter(param.getKey(), param.getValue());
     }
-
-    /**
-     Override this to add parameters to the request
-     */
-    protected Map<String, String> prepareParams(StreamsDatum entry) {
-        return new HashMap<>();
+    try {
+      uri = uriBuilder.build();
+    } catch (URISyntaxException ex) {
+      LOGGER.error("URI error {}", uriBuilder.toString());
     }
-
-    /**
-     Override this to alter json payload on to the request
-     */
-    protected ObjectNode preparePayload(StreamsDatum entry) throws Exception {
-
-        if( entry.getDocument() != null ) {
-            if( entry.getDocument() instanceof ObjectNode )
-                return (ObjectNode) entry.getDocument();
-            else return mapper.convertValue(entry.getDocument(), ObjectNode.class);
-        }
-        else return null;
+    return uri;
+  }
+
+  /**
+   Override this to add parameters to the request.
+   */
+  protected Map<String, String> prepareParams(StreamsDatum entry) {
+    return new HashMap<>();
+  }
+
+  /**
+   Override this to alter json payload on to the request.
+   */
+  protected ObjectNode preparePayload(StreamsDatum entry) throws Exception {
+
+    if ( entry.getDocument() != null ) {
+      if ( entry.getDocument() instanceof ObjectNode ) {
+        return (ObjectNode) entry.getDocument();
+      } else {
+        return mapper.convertValue(entry.getDocument(), ObjectNode.class);
+      }
+    } else {
+      return null;
     }
-
-    /**
-     Override this to add headers to the request
-     */
-    public HttpPost prepareHttpPost(URI uri, ObjectNode payload) {
-        HttpPost httppost = new HttpPost(uri);
-        httppost.addHeader("content-type", this.configuration.getContentType());
-        httppost.addHeader("accept-charset", "UTF-8");
-        if( !Strings.isNullOrEmpty(authHeader))
-            httppost.addHeader("Authorization", "Basic " + authHeader);
-        try {
-            String entity = mapper.writeValueAsString(payload);
-            httppost.setEntity(new StringEntity(entity));
-        } catch (JsonProcessingException | UnsupportedEncodingException e) {
-            LOGGER.warn(e.getMessage());
-        }
-        return httppost;
+  }
+
+  /**
+   Override this to add headers to the request.
+   */
+  public HttpPost prepareHttpPost(URI uri, ObjectNode payload) {
+    HttpPost httppost = new HttpPost(uri);
+    httppost.addHeader("content-type", this.configuration.getContentType());
+    httppost.addHeader("accept-charset", "UTF-8");
+    if ( !Strings.isNullOrEmpty(authHeader)) {
+      httppost.addHeader("Authorization", "Basic " + authHeader);
     }
-
-    protected ObjectNode executePost(HttpPost httpPost) {
-
-        Preconditions.checkNotNull(httpPost);
-
-        ObjectNode result = null;
-
-        CloseableHttpResponse response = null;
-
-        String entityString;
-        try {
-            response = httpclient.execute(httpPost);
-            HttpEntity entity = response.getEntity();
-            // TODO: handle retry
-            if (response.getStatusLine() != null && response.getStatusLine().getStatusCode() >= HttpStatus.SC_OK && entity != null) {
-                entityString = EntityUtils.toString(entity);
-                result = mapper.readValue(entityString, ObjectNode.class);
-            }
-        } catch (IOException e) {
-            LOGGER.error("IO error:\n{}\n{}\n{}", httpPost.toString(), response, e.getMessage());
-        } finally {
-            try {
-                if (response != null) {
-                    response.close();
-                }
-            } catch (IOException ignored) {}
+    try {
+      String entity = mapper.writeValueAsString(payload);
+      httppost.setEntity(new StringEntity(entity));
+    } catch (JsonProcessingException | UnsupportedEncodingException ex) {
+      LOGGER.warn(ex.getMessage());
+    }
+    return httppost;
+  }
+
+  protected ObjectNode executePost(HttpPost httpPost) {
+
+    Preconditions.checkNotNull(httpPost);
+
+    ObjectNode result = null;
+
+    CloseableHttpResponse response = null;
+
+    String entityString;
+    try {
+      response = httpclient.execute(httpPost);
+      HttpEntity entity = response.getEntity();
+      // TODO: handle retry
+      if (response.getStatusLine() != null && response.getStatusLine().getStatusCode() >= HttpStatus.SC_OK && entity != null) {
+        entityString = EntityUtils.toString(entity);
+        result = mapper.readValue(entityString, ObjectNode.class);
+      }
+    } catch (IOException ex) {
+      LOGGER.error("IO error:\n{}\n{}\n{}", httpPost.toString(), response, ex.getMessage());
+    } finally {
+      try {
+        if (response != null) {
+          response.close();
         }
-        return result;
+      } catch (IOException ignored) {
+        LOGGER.trace("IOException", ignored);
+      }
     }
+    return result;
+  }
 
-    @Override
-    public void prepare(Object configurationObject) {
+  @Override
+  public void prepare(Object configurationObject) {
 
-        mapper = StreamsJacksonMapper.getInstance();
+    mapper = StreamsJacksonMapper.getInstance();
 
-        uriBuilder = new URIBuilder()
-                .setScheme(this.configuration.getProtocol())
-                .setHost(this.configuration.getHostname())
-                .setPort(this.configuration.getPort().intValue())
-                .setPath(this.configuration.getResourcePath());
-
-        if( !Strings.isNullOrEmpty(configuration.getAccessToken()) )
-            uriBuilder = uriBuilder.addParameter("access_token", configuration.getAccessToken());
-        if( !Strings.isNullOrEmpty(configuration.getUsername())
-                && !Strings.isNullOrEmpty(configuration.getPassword())) {
-            String string = configuration.getUsername() + ":" + configuration.getPassword();
-            authHeader = Base64.encodeBase64String(string.getBytes());
-        }
-
-        httpclient = HttpClients.createDefault();
+    uriBuilder = new URIBuilder()
+        .setScheme(this.configuration.getProtocol())
+        .setHost(this.configuration.getHostname())
+        .setPort(this.configuration.getPort().intValue())
+        .setPath(this.configuration.getResourcePath());
 
+    if ( !Strings.isNullOrEmpty(configuration.getAccessToken()) ) {
+      uriBuilder = uriBuilder.addParameter("access_token", configuration.getAccessToken());
+    }
+    if ( !Strings.isNullOrEmpty(configuration.getUsername())
+        && !Strings.isNullOrEmpty(configuration.getPassword())) {
+      String string = configuration.getUsername() + ":" + configuration.getPassword();
+      authHeader = Base64.encodeBase64String(string.getBytes());
     }
 
-    @Override
-    public void cleanUp() {
-
-        LOGGER.info("shutting down SimpleHTTPPostPersistWriter");
-        try {
-            httpclient.close();
-        } catch (IOException e) {
-            LOGGER.error(e.getMessage());
-        } finally {
-            try {
-                httpclient.close();
-            } catch (IOException e) {
-                LOGGER.error(e.getMessage());
-            } finally {
-                httpclient = null;
-            }
-        }
+    httpclient = HttpClients.createDefault();
+
+  }
+
+  @Override
+  public void cleanUp() {
+
+    LOGGER.info("shutting down SimpleHTTPPostPersistWriter");
+    try {
+      httpclient.close();
+    } catch (IOException ex) {
+      LOGGER.error(ex.getMessage());
+    } finally {
+      try {
+        httpclient.close();
+      } catch (IOException e2) {
+        LOGGER.error(e2.getMessage());
+      } finally {
+        httpclient = null;
+      }
     }
+  }
 }


[06/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsProviderTask.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsProviderTask.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsProviderTask.java
index 3405882..26272ea 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsProviderTask.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsProviderTask.java
@@ -18,17 +18,22 @@
 
 package org.apache.streams.local.tasks;
 
-import com.google.common.util.concurrent.Uninterruptibles;
 import org.apache.streams.config.StreamsConfiguration;
-import org.apache.streams.core.*;
+import org.apache.streams.core.DatumStatus;
+import org.apache.streams.core.DatumStatusCountable;
+import org.apache.streams.core.DatumStatusCounter;
+import org.apache.streams.core.StreamsDatum;
+import org.apache.streams.core.StreamsProvider;
+import org.apache.streams.core.StreamsResultSet;
 import org.apache.streams.core.util.DatumUtils;
 import org.apache.streams.local.counters.StreamsTaskCounter;
+
+import com.google.common.util.concurrent.Uninterruptibles;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.math.BigInteger;
-import java.util.Map;
 import java.util.Queue;
 import java.util.UUID;
 import java.util.concurrent.BlockingQueue;
@@ -40,219 +45,219 @@ import java.util.concurrent.atomic.AtomicBoolean;
  */
 public class StreamsProviderTask extends BaseStreamsTask implements DatumStatusCountable {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(StreamsProviderTask.class);
+  private final static Logger LOGGER = LoggerFactory.getLogger(StreamsProviderTask.class);
 
-    public DatumStatusCounter getDatumStatusCounter() {
-        return this.statusCounter;
-    }
+  public DatumStatusCounter getDatumStatusCounter() {
+    return this.statusCounter;
+  }
 
-    private static enum Type {
-        PERPETUAL,
-        READ_CURRENT,
-        READ_NEW,
-        READ_RANGE
-    }
+  private static enum Type {
+    PERPETUAL,
+    READ_CURRENT,
+    READ_NEW,
+    READ_RANGE
+  }
 
-    private static final int START = 0;
-    private static final int END = 1;
+  private static final int START = 0;
+  private static final int END = 1;
 
-    private StreamsProvider provider;
-    private final AtomicBoolean keepRunning = new AtomicBoolean(true);
-    private final AtomicBoolean flushing = new AtomicBoolean(false);
-    private final AtomicBoolean started = new AtomicBoolean(false);
-    private Type type;
-    private BigInteger sequence;
-    private DateTime[] dateRange;
-    private StreamsConfiguration config;
+  private StreamsProvider provider;
+  private final AtomicBoolean keepRunning = new AtomicBoolean(true);
+  private final AtomicBoolean flushing = new AtomicBoolean(false);
+  private final AtomicBoolean started = new AtomicBoolean(false);
+  private Type type;
+  private BigInteger sequence;
+  private DateTime[] dateRange;
+  private StreamsConfiguration config;
 
-    private int timeout;
-    private long sleepTime;
-    private int zeros = 0;
-    private DatumStatusCounter statusCounter = new DatumStatusCounter();
-    private StreamsTaskCounter counter;
+  private int timeout;
+  private long sleepTime;
+  private int zeros = 0;
+  private DatumStatusCounter statusCounter = new DatumStatusCounter();
+  private StreamsTaskCounter counter;
 
-    /**
-     * Constructor for a StreamsProvider to execute {@link org.apache.streams.core.StreamsProvider:readCurrent()}
-     * @param provider
-     */
-    public StreamsProviderTask(StreamsProvider provider, boolean perpetual, StreamsConfiguration streamConfig) {
-        super(streamConfig);
-        streamConfig = super.streamConfig;
-        this.provider = provider;
-        if( perpetual )
-            this.type = Type.PERPETUAL;
-        else
-            this.type = Type.READ_CURRENT;
-        this.timeout = super.streamConfig.getProviderTimeoutMs().intValue();
-        this.sleepTime = streamConfig.getBatchFrequencyMs();
-    }
+  /**
+   * Constructor for a StreamsProvider to execute {@link org.apache.streams.core.StreamsProvider:readCurrent()}
+   * @param provider
+   */
+  public StreamsProviderTask(StreamsProvider provider, boolean perpetual, StreamsConfiguration streamConfig) {
+    super(streamConfig);
+    streamConfig = super.streamConfig;
+    this.provider = provider;
+    if( perpetual )
+      this.type = Type.PERPETUAL;
+    else
+      this.type = Type.READ_CURRENT;
+    this.timeout = super.streamConfig.getProviderTimeoutMs().intValue();
+    this.sleepTime = streamConfig.getBatchFrequencyMs();
+  }
 
-    /**
-     * Constructor for a StreamsProvider to execute {@link org.apache.streams.core.StreamsProvider:readNew(BigInteger)}
-     * @param provider
-     * @param sequence
-     */
-    public StreamsProviderTask(StreamsProvider provider, BigInteger sequence, StreamsConfiguration streamConfig) {
-        super(streamConfig);
-        this.provider = provider;
-        this.type = Type.READ_NEW;
-        this.sequence = sequence;
-        this.timeout = streamConfig.getProviderTimeoutMs().intValue();
-        this.sleepTime = streamConfig.getBatchFrequencyMs();
-    }
+  /**
+   * Constructor for a StreamsProvider to execute {@link org.apache.streams.core.StreamsProvider:readNew(BigInteger)}
+   * @param provider
+   * @param sequence
+   */
+  public StreamsProviderTask(StreamsProvider provider, BigInteger sequence, StreamsConfiguration streamConfig) {
+    super(streamConfig);
+    this.provider = provider;
+    this.type = Type.READ_NEW;
+    this.sequence = sequence;
+    this.timeout = streamConfig.getProviderTimeoutMs().intValue();
+    this.sleepTime = streamConfig.getBatchFrequencyMs();
+  }
 
-    /**
-     * Constructor for a StreamsProvider to execute {@link org.apache.streams.core.StreamsProvider:readRange(DateTime,DateTime)}
-     * @param provider
-     * @param start
-     * @param end
-     */
-    public StreamsProviderTask(StreamsProvider provider, DateTime start, DateTime end, StreamsConfiguration streamConfig) {
-        super(streamConfig);
-        this.provider = provider;
-        this.type = Type.READ_RANGE;
-        this.dateRange = new DateTime[2];
-        this.dateRange[START] = start;
-        this.dateRange[END] = end;
-        this.timeout = streamConfig.getProviderTimeoutMs().intValue();
-        this.sleepTime = streamConfig.getBatchFrequencyMs();
-    }
+  /**
+   * Constructor for a StreamsProvider to execute {@link org.apache.streams.core.StreamsProvider:readRange(DateTime,DateTime)}
+   * @param provider
+   * @param start
+   * @param end
+   */
+  public StreamsProviderTask(StreamsProvider provider, DateTime start, DateTime end, StreamsConfiguration streamConfig) {
+    super(streamConfig);
+    this.provider = provider;
+    this.type = Type.READ_RANGE;
+    this.dateRange = new DateTime[2];
+    this.dateRange[START] = start;
+    this.dateRange[END] = end;
+    this.timeout = streamConfig.getProviderTimeoutMs().intValue();
+    this.sleepTime = streamConfig.getBatchFrequencyMs();
+  }
 
-    public void setTimeout(int timeout) {
-        this.timeout = timeout;
-    }
+  public void setTimeout(int timeout) {
+    this.timeout = timeout;
+  }
 
-    public void setSleepTime(long sleepTime) {
-        this.sleepTime = sleepTime;
-    }
+  public void setSleepTime(long sleepTime) {
+    this.sleepTime = sleepTime;
+  }
 
-    @Override
-    public boolean isWaiting() {
-        return false; //providers don't have inbound queues
-    }
+  @Override
+  public boolean isWaiting() {
+    return false; //providers don't have inbound queues
+  }
 
-    @Override
-    public void stopTask() {
-        LOGGER.debug("Stopping Provider Task for {}", this.provider.getClass().getSimpleName());
-        this.keepRunning.set(false);
-    }
+  @Override
+  public void stopTask() {
+    LOGGER.debug("Stopping Provider Task for {}", this.provider.getClass().getSimpleName());
+    this.keepRunning.set(false);
+  }
 
-    @Override
-    public void addInputQueue(BlockingQueue<StreamsDatum> inputQueue) {
-        throw new UnsupportedOperationException(this.getClass().getName()+" does not support method - setInputQueue()");
-    }
+  @Override
+  public void addInputQueue(BlockingQueue<StreamsDatum> inputQueue) {
+    throw new UnsupportedOperationException(this.getClass().getName()+" does not support method - setInputQueue()");
+  }
 
-    @Override
-    public void setStreamConfig(StreamsConfiguration config) {
-        this.config = config;
-    }
+  @Override
+  public void setStreamConfig(StreamsConfiguration config) {
+    this.config = config;
+  }
 
 
-    @Override
-    public void run() {
-        try {
-            this.provider.prepare(this.config); //TODO allow for configuration objects
-            StreamsResultSet resultSet = null;
-            //Negative values mean we want to run forever
-            long maxZeros = timeout < 0 ? Long.MAX_VALUE : (timeout / sleepTime);
-            if(this.counter == null) { //should never be null
-                this.counter = new StreamsTaskCounter(this.provider.getClass().getName()+ UUID.randomUUID().toString(), getStreamIdentifier(), getStartedAt());
+  @Override
+  public void run() {
+    try {
+      this.provider.prepare(this.config); //TODO allow for configuration objects
+      StreamsResultSet resultSet = null;
+      //Negative values mean we want to run forever
+      long maxZeros = timeout < 0 ? Long.MAX_VALUE : (timeout / sleepTime);
+      if(this.counter == null) { //should never be null
+        this.counter = new StreamsTaskCounter(this.provider.getClass().getName()+ UUID.randomUUID().toString(), getStreamIdentifier(), getStartedAt());
+      }
+      switch(this.type) {
+        case PERPETUAL: {
+          provider.startStream();
+          this.started.set(true);
+          while(this.isRunning()) {
+            try {
+              long startTime = System.currentTimeMillis();
+              resultSet = provider.readCurrent();
+              this.counter.addTime(System.currentTimeMillis() - startTime);
+              if( resultSet.size() == 0 )
+                zeros++;
+              else {
+                zeros = 0;
+              }
+              flushResults(resultSet);
+              // the way this works needs to change...
+              if(zeros > maxZeros)
+                this.keepRunning.set(false);
+              if(zeros > 0)
+                Uninterruptibles.sleepUninterruptibly(sleepTime, TimeUnit.MILLISECONDS);
+            } catch (Exception e) {
+              this.counter.incrementErrorCount();
+              LOGGER.warn("Thread exception");
+              this.keepRunning.set(false);
             }
-            switch(this.type) {
-                case PERPETUAL: {
-                    provider.startStream();
-                    this.started.set(true);
-                    while(this.isRunning()) {
-                        try {
-                            long startTime = System.currentTimeMillis();
-                            resultSet = provider.readCurrent();
-                            this.counter.addTime(System.currentTimeMillis() - startTime);
-                            if( resultSet.size() == 0 )
-                                zeros++;
-                            else {
-                                zeros = 0;
-                            }
-                            flushResults(resultSet);
-                            // the way this works needs to change...
-                            if(zeros > maxZeros)
-                                this.keepRunning.set(false);
-                            if(zeros > 0)
-                                Uninterruptibles.sleepUninterruptibly(sleepTime, TimeUnit.MILLISECONDS);
-                        } catch (Exception e) {
-                            this.counter.incrementErrorCount();
-                            LOGGER.warn("Thread exception");
-                            this.keepRunning.set(false);
-                        }
-                    }
-                    Uninterruptibles.sleepUninterruptibly(sleepTime, TimeUnit.MILLISECONDS);
-                }
-                    break;
-                case READ_CURRENT:
-                    resultSet = this.provider.readCurrent();
-                    this.started.set(true);
-                    break;
-                case READ_NEW:
-                    resultSet = this.provider.readNew(this.sequence);
-                    this.started.set(true);
-                    break;
-                case READ_RANGE:
-                    resultSet = this.provider.readRange(this.dateRange[START], this.dateRange[END]);
-                    this.started.set(true);
-                    break;
-                default: throw new RuntimeException("Type has not been added to StreamsProviderTask.");
-            }
-            if( resultSet != null )
-                flushResults(resultSet);
-
-        } catch(Throwable e) {
-            LOGGER.error("Caught Throwable in Provider {}", this.provider.getClass().getSimpleName(), e);
-        }  finally {
-            Uninterruptibles.sleepUninterruptibly(sleepTime, TimeUnit.MILLISECONDS);
-            LOGGER.debug("Complete Provider Task execution for {}", this.provider.getClass().getSimpleName());
-            this.provider.cleanUp();
-            //Setting started to 'true' here will allow the isRunning() method to return false in the event of an exception
-            //before started would normally be set to true n the run method.
-            this.started.set(true);
-            this.keepRunning.set(false);
+          }
+          Uninterruptibles.sleepUninterruptibly(sleepTime, TimeUnit.MILLISECONDS);
         }
-    }
+        break;
+        case READ_CURRENT:
+          resultSet = this.provider.readCurrent();
+          this.started.set(true);
+          break;
+        case READ_NEW:
+          resultSet = this.provider.readNew(this.sequence);
+          this.started.set(true);
+          break;
+        case READ_RANGE:
+          resultSet = this.provider.readRange(this.dateRange[START], this.dateRange[END]);
+          this.started.set(true);
+          break;
+        default: throw new RuntimeException("Type has not been added to StreamsProviderTask.");
+      }
+      if( resultSet != null )
+        flushResults(resultSet);
 
-    @Override
-    public boolean isRunning() {
-        //We want to make sure that we never return false if it is flushing, regardless of the state of the provider
-        //or whether we have been told to shut down.  If someone really wants us to shut down, they will interrupt the
-        //thread and force us to shutdown.  We also want to make sure we have had the opportunity to run before the
-        //runtime kills us.
-        return !this.started.get() || this.flushing.get() || (this.provider.isRunning() && this.keepRunning.get());
+    } catch(Throwable e) {
+      LOGGER.error("Caught Throwable in Provider {}", this.provider.getClass().getSimpleName(), e);
+    }  finally {
+      Uninterruptibles.sleepUninterruptibly(sleepTime, TimeUnit.MILLISECONDS);
+      LOGGER.debug("Complete Provider Task execution for {}", this.provider.getClass().getSimpleName());
+      this.provider.cleanUp();
+      //Setting started to 'true' here will allow the isRunning() method to return false in the event of an exception
+      //before started would normally be set to true n the run method.
+      this.started.set(true);
+      this.keepRunning.set(false);
     }
+  }
 
-    public void flushResults(StreamsResultSet resultSet) {
-        Queue<StreamsDatum> queue = resultSet.getQueue();
-        this.flushing.set(true);
-        while(!queue.isEmpty()) {
-            StreamsDatum datum = queue.poll();
-            if(!this.keepRunning.get()) {
-                break;
-            }
-            if(datum != null) {
-                try {
-                    super.addToOutgoingQueue(datum);
-                    this.counter.incrementEmittedCount();
-                    statusCounter.incrementStatus(DatumStatus.SUCCESS);
-                } catch( Exception e ) {
-                    this.counter.incrementErrorCount();
-                    statusCounter.incrementStatus(DatumStatus.FAIL);
-                    DatumUtils.addErrorToMetadata(datum, e, this.provider.getClass());
-                }
-            }
+  @Override
+  public boolean isRunning() {
+    //We want to make sure that we never return false if it is flushing, regardless of the state of the provider
+    //or whether we have been told to shut down.  If someone really wants us to shut down, they will interrupt the
+    //thread and force us to shutdown.  We also want to make sure we have had the opportunity to run before the
+    //runtime kills us.
+    return !this.started.get() || this.flushing.get() || (this.provider.isRunning() && this.keepRunning.get());
+  }
+
+  public void flushResults(StreamsResultSet resultSet) {
+    Queue<StreamsDatum> queue = resultSet.getQueue();
+    this.flushing.set(true);
+    while(!queue.isEmpty()) {
+      StreamsDatum datum = queue.poll();
+      if(!this.keepRunning.get()) {
+        break;
+      }
+      if(datum != null) {
+        try {
+          super.addToOutgoingQueue(datum);
+          this.counter.incrementEmittedCount();
+          statusCounter.incrementStatus(DatumStatus.SUCCESS);
+        } catch( Exception e ) {
+          this.counter.incrementErrorCount();
+          statusCounter.incrementStatus(DatumStatus.FAIL);
+          DatumUtils.addErrorToMetadata(datum, e, this.provider.getClass());
         }
-        this.flushing.set(false);
+      }
     }
+    this.flushing.set(false);
+  }
 
-    @Override
-    public void setStreamsTaskCounter(StreamsTaskCounter counter) {
-        this.counter = counter;
-    }
+  @Override
+  public void setStreamsTaskCounter(StreamsTaskCounter counter) {
+    this.counter = counter;
+  }
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsTask.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsTask.java b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsTask.java
index 5c14c1f..1b91e5c 100644
--- a/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsTask.java
+++ b/streams-runtimes/streams-runtime-local/src/main/java/org/apache/streams/local/tasks/StreamsTask.java
@@ -23,8 +23,6 @@ import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.local.counters.StreamsTaskCounter;
 
 import java.util.List;
-import java.util.Map;
-import java.util.Queue;
 import java.util.concurrent.BlockingQueue;
 
 /**
@@ -33,53 +31,53 @@ import java.util.concurrent.BlockingQueue;
  */
 public interface StreamsTask extends Runnable{
 
-    /**
-     * Informs the task to stop. Tasks may or may not try to empty its inbound queue before halting.
-     */
-    public void stopTask();
+  /**
+   * Informs the task to stop. Tasks may or may not try to empty its inbound queue before halting.
+   */
+  public void stopTask();
 
-    /**
-     * Returns true if the task is waiting on more data to process
-     * @return true, if waiting on more data to process
-     */
-    public boolean isWaiting();
-    /**
-     * Add an input {@link java.util.Queue} for this task.
-     * @param inputQueue
-     */
-    public void addInputQueue(BlockingQueue<StreamsDatum> inputQueue);
+  /**
+   * Returns true if the task is waiting on more data to process
+   * @return true, if waiting on more data to process
+   */
+  public boolean isWaiting();
+  /**
+   * Add an input {@link java.util.Queue} for this task.
+   * @param inputQueue
+   */
+  public void addInputQueue(BlockingQueue<StreamsDatum> inputQueue);
 
-    /**
-     * Add an output {@link java.util.Queue} for this task.
-     * @param outputQueue
-     */
-    public void addOutputQueue(BlockingQueue<StreamsDatum> outputQueue);
+  /**
+   * Add an output {@link java.util.Queue} for this task.
+   * @param outputQueue
+   */
+  public void addOutputQueue(BlockingQueue<StreamsDatum> outputQueue);
 
-    /**
-     * Set the configuration object that will shared and passed to all instances of StreamsTask.
-     * @param config optional configuration information
-     */
-    public void setStreamConfig(StreamsConfiguration config);
+  /**
+   * Set the configuration object that will shared and passed to all instances of StreamsTask.
+   * @param config optional configuration information
+   */
+  public void setStreamConfig(StreamsConfiguration config);
 
-    /**
-     * Returns true when the task has not completed. Returns false otherwise
-     * @return true when the task has not completed. Returns false otherwise
-     */
-    public boolean isRunning();
+  /**
+   * Returns true when the task has not completed. Returns false otherwise
+   * @return true when the task has not completed. Returns false otherwise
+   */
+  public boolean isRunning();
 
-    /**
-     * Returns the input queues that have been set for this task.
-     * @return list of input queues
-     */
-    public List<BlockingQueue<StreamsDatum>> getInputQueues();
+  /**
+   * Returns the input queues that have been set for this task.
+   * @return list of input queues
+   */
+  public List<BlockingQueue<StreamsDatum>> getInputQueues();
 
-    /**
-     * Returns the output queues that have been set for this task
-     * @return list of output queues
-     */
-    public List<BlockingQueue<StreamsDatum>> getOutputQueues();
+  /**
+   * Returns the output queues that have been set for this task
+   * @return list of output queues
+   */
+  public List<BlockingQueue<StreamsDatum>> getOutputQueues();
 
 
-    public void setStreamsTaskCounter(StreamsTaskCounter counter);
+  public void setStreamsTaskCounter(StreamsTaskCounter counter);
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/builders/LocalStreamBuilderTest.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/builders/LocalStreamBuilderTest.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/builders/LocalStreamBuilderTest.java
index 2bbfdcc..741c0b5 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/builders/LocalStreamBuilderTest.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/builders/LocalStreamBuilderTest.java
@@ -18,11 +18,6 @@
 
 package org.apache.streams.local.builders;
 
-import com.carrotsearch.randomizedtesting.RandomizedTest;
-import com.carrotsearch.randomizedtesting.annotations.Repeat;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.util.concurrent.Uninterruptibles;
 import org.apache.streams.core.StreamBuilder;
 import org.apache.streams.core.StreamsDatum;
 import org.apache.streams.core.StreamsPersistWriter;
@@ -36,6 +31,12 @@ import org.apache.streams.local.test.providers.NumericMessageProvider;
 import org.apache.streams.local.test.writer.DatumCounterWriter;
 import org.apache.streams.local.test.writer.SystemOutWriter;
 import org.apache.streams.util.ComponentUtils;
+
+import com.carrotsearch.randomizedtesting.RandomizedTest;
+import com.carrotsearch.randomizedtesting.annotations.Repeat;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.util.concurrent.Uninterruptibles;
 import org.joda.time.DateTime;
 import org.junit.After;
 import org.junit.Before;
@@ -44,7 +45,6 @@ import org.junit.Test;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
-import javax.management.*;
 import java.lang.management.ManagementFactory;
 import java.util.Collections;
 import java.util.List;
@@ -55,10 +55,21 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
+import javax.management.InstanceNotFoundException;
+import javax.management.MBeanRegistrationException;
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.ObjectName;
 
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.allOf;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
 /**
  * Basic Tests for the LocalStreamBuilder.
@@ -70,334 +81,334 @@ import static org.mockito.Mockito.*;
  *
  */
 public class LocalStreamBuilderTest extends RandomizedTest {
-    private static final String MBEAN_ID = "test_id";
-    private static final String STREAM_ID = "test_stream";
-    private static long STREAM_START_TIME = (new DateTime()).getMillis();
+  private static final String MBEAN_ID = "test_id";
+  private static final String STREAM_ID = "test_stream";
+  private static long STREAM_START_TIME = (new DateTime()).getMillis();
 
-    @After
-    public void removeLocalMBeans() {
-        try {
-            ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
-        } catch (Exception e) {
-            //No op.  proceed to next test
-        }
+  @After
+  public void removeLocalMBeans() {
+    try {
+      ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
+    } catch (Exception e) {
+      //No op.  proceed to next test
     }
+  }
 
 
-    public void removeRegisteredMBeans(String... ids) {
-        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
-        for(String id : ids) {
-            try {
-                mbs.unregisterMBean(new ObjectName(String.format(ThroughputQueue.NAME_TEMPLATE, id, STREAM_ID, STREAM_START_TIME)));
-            } catch (MalformedObjectNameException|InstanceNotFoundException|MBeanRegistrationException e) {
-                //No-op
-            }
-            try {
-                mbs.unregisterMBean(new ObjectName((String.format(StreamsTaskCounter.NAME_TEMPLATE, id, STREAM_ID, STREAM_START_TIME))));
-            } catch (MalformedObjectNameException|InstanceNotFoundException|MBeanRegistrationException e) {
-                //No-op
-            }
-        }
+  public void removeRegisteredMBeans(String... ids) {
+    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+    for(String id : ids) {
+      try {
+        mbs.unregisterMBean(new ObjectName(String.format(ThroughputQueue.NAME_TEMPLATE, id, STREAM_ID, STREAM_START_TIME)));
+      } catch (MalformedObjectNameException|InstanceNotFoundException|MBeanRegistrationException e) {
+        //No-op
+      }
+      try {
+        mbs.unregisterMBean(new ObjectName((String.format(StreamsTaskCounter.NAME_TEMPLATE, id, STREAM_ID, STREAM_START_TIME))));
+      } catch (MalformedObjectNameException|InstanceNotFoundException|MBeanRegistrationException e) {
+        //No-op
+      }
     }
+  }
 
 
 
 
-    @Test
-    public void testStreamIdValidations() {
-        StreamBuilder builder = new LocalStreamBuilder();
-        builder.newReadCurrentStream("id", new NumericMessageProvider(1));
-        Exception exp = null;
-        try {
-            builder.newReadCurrentStream("id", new NumericMessageProvider(1));
-        } catch (RuntimeException e) {
-            exp = e;
-        }
-        assertNotNull(exp);
-        exp = null;
-        builder.addStreamsProcessor("1", new PassthroughDatumCounterProcessor("1"), 1, "id");
-        try {
-            builder.addStreamsProcessor("2", new PassthroughDatumCounterProcessor("2"), 1, "id", "id2");
-        } catch (RuntimeException e) {
-            exp = e;
-        }
-        assertNotNull(exp);
-        removeRegisteredMBeans("1", "2", "id");
+  @Test
+  public void testStreamIdValidations() {
+    StreamBuilder builder = new LocalStreamBuilder();
+    builder.newReadCurrentStream("id", new NumericMessageProvider(1));
+    Exception exp = null;
+    try {
+      builder.newReadCurrentStream("id", new NumericMessageProvider(1));
+    } catch (RuntimeException e) {
+      exp = e;
     }
-
-    @Test
-    public void testBasicLinearStream1()  {
-        linearStreamNonParallel(1, 1);
+    assertNotNull(exp);
+    exp = null;
+    builder.addStreamsProcessor("1", new PassthroughDatumCounterProcessor("1"), 1, "id");
+    try {
+      builder.addStreamsProcessor("2", new PassthroughDatumCounterProcessor("2"), 1, "id", "id2");
+    } catch (RuntimeException e) {
+      exp = e;
     }
+    assertNotNull(exp);
+    removeRegisteredMBeans("1", "2", "id");
+  }
 
-    @Test
-    public void testBasicLinearStream2()  {
-        linearStreamNonParallel(1004, 1);
-    }
+  @Test
+  public void testBasicLinearStream1()  {
+    linearStreamNonParallel(1, 1);
+  }
 
-    @Test
-    public void testBasicLinearStream3()  {
-        linearStreamNonParallel(1, 10);
-    }
+  @Test
+  public void testBasicLinearStream2()  {
+    linearStreamNonParallel(1004, 1);
+  }
 
-    @Test
-    @Repeat(iterations = 3)
-    public void testBasicLinearStreamRandom()  {
-        int numDatums = randomIntBetween(1, 100000);
-        int numProcessors = randomIntBetween(1, 10);
-        linearStreamNonParallel(numDatums, numProcessors);
-    }
+  @Test
+  public void testBasicLinearStream3()  {
+    linearStreamNonParallel(1, 10);
+  }
+
+  @Test
+  @Repeat(iterations = 3)
+  public void testBasicLinearStreamRandom()  {
+    int numDatums = randomIntBetween(1, 100000);
+    int numProcessors = randomIntBetween(1, 10);
+    linearStreamNonParallel(numDatums, numProcessors);
+  }
 
-    /**
-     * Tests that all datums pass through each processor and that all datums reach the writer
-     * @param numDatums
-     * @param numProcessors
-     */
-    private void linearStreamNonParallel(int numDatums, int numProcessors) {
-        String processorId = "proc";
-        try {
-            StreamBuilder builder = new LocalStreamBuilder(10);
-            builder.newPerpetualStream("numeric_provider", new NumericMessageProvider(numDatums));
-            String connectTo = null;
-            for(int i=0; i < numProcessors; ++i) {
-                if(i == 0) {
-                    connectTo = "numeric_provider";
-                } else {
-                    connectTo = processorId+(i-1);
-                }
-                builder.addStreamsProcessor(processorId+i, new PassthroughDatumCounterProcessor(processorId+i), 1, connectTo);
-            }
-            Set output = Collections.newSetFromMap(new ConcurrentHashMap());
-            builder.addStreamsPersistWriter("writer", new DatumCounterWriter("writer"), 1, processorId+(numProcessors-1));
-            builder.start();
-            for(int i=0; i < numProcessors; ++i) {
-                assertEquals("Processor "+i+" did not receive all of the datums", numDatums, PassthroughDatumCounterProcessor.COUNTS.get(processorId+i).get());
-            }
-            for(int i=0; i < numDatums; ++i) {
-                assertTrue("Expected writer to have received : "+i, DatumCounterWriter.RECEIVED.get("writer").contains(i));
-            }
-        } finally {
-            for(int i=0; i < numProcessors; ++i) {
-                removeRegisteredMBeans(processorId+i, processorId+i+"-"+PassthroughDatumCounterProcessor.class.getCanonicalName());
-            }
-            removeRegisteredMBeans("writer", "numeric_provider");
+  /**
+   * Tests that all datums pass through each processor and that all datums reach the writer
+   * @param numDatums
+   * @param numProcessors
+   */
+  private void linearStreamNonParallel(int numDatums, int numProcessors) {
+    String processorId = "proc";
+    try {
+      StreamBuilder builder = new LocalStreamBuilder(10);
+      builder.newPerpetualStream("numeric_provider", new NumericMessageProvider(numDatums));
+      String connectTo = null;
+      for(int i=0; i < numProcessors; ++i) {
+        if(i == 0) {
+          connectTo = "numeric_provider";
+        } else {
+          connectTo = processorId+(i-1);
         }
+        builder.addStreamsProcessor(processorId+i, new PassthroughDatumCounterProcessor(processorId+i), 1, connectTo);
+      }
+      Set output = Collections.newSetFromMap(new ConcurrentHashMap());
+      builder.addStreamsPersistWriter("writer", new DatumCounterWriter("writer"), 1, processorId+(numProcessors-1));
+      builder.start();
+      for(int i=0; i < numProcessors; ++i) {
+        assertEquals("Processor "+i+" did not receive all of the datums", numDatums, PassthroughDatumCounterProcessor.COUNTS.get(processorId+i).get());
+      }
+      for(int i=0; i < numDatums; ++i) {
+        assertTrue("Expected writer to have received : "+i, DatumCounterWriter.RECEIVED.get("writer").contains(i));
+      }
+    } finally {
+      for(int i=0; i < numProcessors; ++i) {
+        removeRegisteredMBeans(processorId+i, processorId+i+"-"+PassthroughDatumCounterProcessor.class.getCanonicalName());
+      }
+      removeRegisteredMBeans("writer", "numeric_provider");
     }
+  }
 
-    @Test
-    public void testParallelLinearStream1() {
-        String processorId = "proc";
-        int numProcessors = randomIntBetween(1, 10);
-        int numDatums = randomIntBetween(1, 300000);
-        try {
-            StreamBuilder builder = new LocalStreamBuilder(50);
-            builder.newPerpetualStream("numeric_provider", new NumericMessageProvider(numDatums));
-            String connectTo = null;
-            for(int i=0; i < numProcessors; ++i) {
-                if(i == 0) {
-                    connectTo = "numeric_provider";
-                } else {
-                    connectTo = processorId+(i-1);
-                }
-                int parallelHint = randomIntBetween(1,5);
-                builder.addStreamsProcessor(processorId+i, new PassthroughDatumCounterProcessor(processorId+i), parallelHint, connectTo);
-            }
-            builder.addStreamsPersistWriter("writer", new DatumCounterWriter("writer"), 1, processorId+(numProcessors-1));
-            builder.start();
-            Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS);
-            builder.stop();
-            Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS);
-            assertEquals(numDatums, DatumCounterWriter.RECEIVED.get("writer").size());
-            for(int i=0; i < numDatums; ++i) {
-                assertTrue("Expected Writer to receive datum : " + i, DatumCounterWriter.RECEIVED.get("writer").contains(i));
-            }
-            for(int i=0; i < numProcessors; ++i) {
-                assertEquals(numDatums, PassthroughDatumCounterProcessor.COUNTS.get(processorId+i).get());
-            }
-
-        } finally {
-            for(int i=0; i < numProcessors; ++i) {
-                removeRegisteredMBeans(processorId+i);
-            }
-            removeRegisteredMBeans("writer", "numeric_provider");
+  @Test
+  public void testParallelLinearStream1() {
+    String processorId = "proc";
+    int numProcessors = randomIntBetween(1, 10);
+    int numDatums = randomIntBetween(1, 300000);
+    try {
+      StreamBuilder builder = new LocalStreamBuilder(50);
+      builder.newPerpetualStream("numeric_provider", new NumericMessageProvider(numDatums));
+      String connectTo = null;
+      for(int i=0; i < numProcessors; ++i) {
+        if(i == 0) {
+          connectTo = "numeric_provider";
+        } else {
+          connectTo = processorId+(i-1);
         }
+        int parallelHint = randomIntBetween(1,5);
+        builder.addStreamsProcessor(processorId+i, new PassthroughDatumCounterProcessor(processorId+i), parallelHint, connectTo);
+      }
+      builder.addStreamsPersistWriter("writer", new DatumCounterWriter("writer"), 1, processorId+(numProcessors-1));
+      builder.start();
+      Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS);
+      builder.stop();
+      Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS);
+      assertEquals(numDatums, DatumCounterWriter.RECEIVED.get("writer").size());
+      for(int i=0; i < numDatums; ++i) {
+        assertTrue("Expected Writer to receive datum : " + i, DatumCounterWriter.RECEIVED.get("writer").contains(i));
+      }
+      for(int i=0; i < numProcessors; ++i) {
+        assertEquals(numDatums, PassthroughDatumCounterProcessor.COUNTS.get(processorId+i).get());
+      }
+
+    } finally {
+      for(int i=0; i < numProcessors; ++i) {
+        removeRegisteredMBeans(processorId+i);
+      }
+      removeRegisteredMBeans("writer", "numeric_provider");
     }
+  }
 
-    @Test
-    public void testBasicMergeStream() {
-        try {
-            int numDatums1 = randomIntBetween(1, 300000);
-            int numDatums2 = randomIntBetween(1, 300000);
-            StreamsProcessor processor1 = new PassthroughDatumCounterProcessor("proc1");
-            StreamsProcessor processor2 = new PassthroughDatumCounterProcessor("proc2");
-            StreamBuilder builder = new LocalStreamBuilder();
-            builder.newPerpetualStream("sp1", new NumericMessageProvider(numDatums1))
-                    .newPerpetualStream("sp2", new NumericMessageProvider(numDatums2))
-                    .addStreamsProcessor("proc1", processor1, 1, "sp1")
-                    .addStreamsProcessor("proc2", processor2, 1, "sp2")
-                    .addStreamsPersistWriter("writer1", new DatumCounterWriter("writer"), 1, "proc1", "proc2");
-            builder.start();
-            assertEquals(numDatums1, PassthroughDatumCounterProcessor.COUNTS.get("proc1").get());
-            assertEquals(numDatums2, PassthroughDatumCounterProcessor.COUNTS.get("proc2").get());
-            assertEquals(numDatums1+numDatums2, DatumCounterWriter.COUNTS.get("writer").get());
-        } finally {
-            String procClass = "-"+PassthroughDatumCounterProcessor.class.getCanonicalName();
-            String writerClass = "-"+DatumCounterWriter.class.getCanonicalName();
-            removeRegisteredMBeans("proc1", "proc2", "writer1", "sp1", "sp2");
-        }
+  @Test
+  public void testBasicMergeStream() {
+    try {
+      int numDatums1 = randomIntBetween(1, 300000);
+      int numDatums2 = randomIntBetween(1, 300000);
+      StreamsProcessor processor1 = new PassthroughDatumCounterProcessor("proc1");
+      StreamsProcessor processor2 = new PassthroughDatumCounterProcessor("proc2");
+      StreamBuilder builder = new LocalStreamBuilder();
+      builder.newPerpetualStream("sp1", new NumericMessageProvider(numDatums1))
+          .newPerpetualStream("sp2", new NumericMessageProvider(numDatums2))
+          .addStreamsProcessor("proc1", processor1, 1, "sp1")
+          .addStreamsProcessor("proc2", processor2, 1, "sp2")
+          .addStreamsPersistWriter("writer1", new DatumCounterWriter("writer"), 1, "proc1", "proc2");
+      builder.start();
+      assertEquals(numDatums1, PassthroughDatumCounterProcessor.COUNTS.get("proc1").get());
+      assertEquals(numDatums2, PassthroughDatumCounterProcessor.COUNTS.get("proc2").get());
+      assertEquals(numDatums1+numDatums2, DatumCounterWriter.COUNTS.get("writer").get());
+    } finally {
+      String procClass = "-"+PassthroughDatumCounterProcessor.class.getCanonicalName();
+      String writerClass = "-"+DatumCounterWriter.class.getCanonicalName();
+      removeRegisteredMBeans("proc1", "proc2", "writer1", "sp1", "sp2");
     }
+  }
 
-    @Test
-    public void testBasicBranch() {
-        try {
-            int numDatums = randomIntBetween(1, 300000);
-            StreamBuilder builder = new LocalStreamBuilder(50);
-            builder.newPerpetualStream("prov1", new NumericMessageProvider(numDatums))
-                    .addStreamsProcessor("proc1", new PassthroughDatumCounterProcessor("proc1"), 1, "prov1")
-                    .addStreamsProcessor("proc2", new PassthroughDatumCounterProcessor("proc2"), 1, "prov1")
-                    .addStreamsPersistWriter("w1", new DatumCounterWriter("writer"), 1, "proc1", "proc2");
-            builder.start();
-            assertEquals(numDatums, PassthroughDatumCounterProcessor.COUNTS.get("proc1").get());
-            assertEquals(numDatums, PassthroughDatumCounterProcessor.COUNTS.get("proc2").get());
-            assertEquals(numDatums*2, DatumCounterWriter.COUNTS.get("writer").get());
-        } finally {
-            String provClass = "-"+NumericMessageProvider.class.getCanonicalName();
-            String procClass = "-"+PassthroughDatumCounterProcessor.class.getCanonicalName();
-            String writerClass = "-"+DatumCounterWriter.class.getCanonicalName();
-            removeRegisteredMBeans("prov1", "proc1", "proc2", "w1");
-        }
+  @Test
+  public void testBasicBranch() {
+    try {
+      int numDatums = randomIntBetween(1, 300000);
+      StreamBuilder builder = new LocalStreamBuilder(50);
+      builder.newPerpetualStream("prov1", new NumericMessageProvider(numDatums))
+          .addStreamsProcessor("proc1", new PassthroughDatumCounterProcessor("proc1"), 1, "prov1")
+          .addStreamsProcessor("proc2", new PassthroughDatumCounterProcessor("proc2"), 1, "prov1")
+          .addStreamsPersistWriter("w1", new DatumCounterWriter("writer"), 1, "proc1", "proc2");
+      builder.start();
+      assertEquals(numDatums, PassthroughDatumCounterProcessor.COUNTS.get("proc1").get());
+      assertEquals(numDatums, PassthroughDatumCounterProcessor.COUNTS.get("proc2").get());
+      assertEquals(numDatums*2, DatumCounterWriter.COUNTS.get("writer").get());
+    } finally {
+      String provClass = "-"+NumericMessageProvider.class.getCanonicalName();
+      String procClass = "-"+PassthroughDatumCounterProcessor.class.getCanonicalName();
+      String writerClass = "-"+DatumCounterWriter.class.getCanonicalName();
+      removeRegisteredMBeans("prov1", "proc1", "proc2", "w1");
     }
+  }
 
-    @Test
-    public void testSlowProcessorBranch() {
-        try {
-            int numDatums = 30;
-            int timeout = 2000;
-            Map<String, Object> config = Maps.newHashMap();
-            config.put(LocalStreamBuilder.TIMEOUT_KEY, timeout);
-            StreamBuilder builder = new LocalStreamBuilder(config);
-            builder.newPerpetualStream("prov1", new NumericMessageProvider(numDatums))
-                    .addStreamsProcessor("proc1", new SlowProcessor(), 1, "prov1")
-                    .addStreamsPersistWriter("w1", new DatumCounterWriter("writer"), 1, "proc1");
-            builder.start();
-            assertEquals(numDatums, DatumCounterWriter.COUNTS.get("writer").get());
-        } finally {
-            String provClass = "-"+NumericMessageProvider.class.getCanonicalName();
-            String procClass = "-"+PassthroughDatumCounterProcessor.class.getCanonicalName();
-            String writerClass = "-"+DatumCounterWriter.class.getCanonicalName();
-            removeRegisteredMBeans("prov1", "proc1", "w1");
-        }
+  @Test
+  public void testSlowProcessorBranch() {
+    try {
+      int numDatums = 30;
+      int timeout = 2000;
+      Map<String, Object> config = Maps.newHashMap();
+      config.put(LocalStreamBuilder.TIMEOUT_KEY, timeout);
+      StreamBuilder builder = new LocalStreamBuilder(config);
+      builder.newPerpetualStream("prov1", new NumericMessageProvider(numDatums))
+          .addStreamsProcessor("proc1", new SlowProcessor(), 1, "prov1")
+          .addStreamsPersistWriter("w1", new DatumCounterWriter("writer"), 1, "proc1");
+      builder.start();
+      assertEquals(numDatums, DatumCounterWriter.COUNTS.get("writer").get());
+    } finally {
+      String provClass = "-"+NumericMessageProvider.class.getCanonicalName();
+      String procClass = "-"+PassthroughDatumCounterProcessor.class.getCanonicalName();
+      String writerClass = "-"+DatumCounterWriter.class.getCanonicalName();
+      removeRegisteredMBeans("prov1", "proc1", "w1");
     }
+  }
 
-    @Test
-    public void testConfiguredProviderTimeout() {
-        try {
-            Map<String, Object> config = Maps.newHashMap();
-            int timeout = 10000;
-            config.put(LocalStreamBuilder.TIMEOUT_KEY, timeout);
-            long start = System.currentTimeMillis();
-            StreamBuilder builder = new LocalStreamBuilder(-1, config);
-            builder.newPerpetualStream("prov1", new EmptyResultSetProvider())
-                    .addStreamsProcessor("proc1", new PassthroughDatumCounterProcessor("proc1"), 1, "prov1")
-                    .addStreamsProcessor("proc2", new PassthroughDatumCounterProcessor("proc2"), 1, "proc1")
-                    .addStreamsPersistWriter("w1", new SystemOutWriter(), 1, "proc1");
-            builder.start();
-            long end = System.currentTimeMillis();
-            //We care mostly that it doesn't terminate too early.  With thread shutdowns, etc, the actual time is indeterminate.  Just make sure there is an upper bound
-            assertThat((int) (end - start), is(allOf(greaterThanOrEqualTo(timeout), lessThanOrEqualTo(4 * timeout))));
-        } finally {
-            String provClass = "-"+NumericMessageProvider.class.getCanonicalName();
-            String procClass = "-"+PassthroughDatumCounterProcessor.class.getCanonicalName();
-            String writerClass = "-"+DatumCounterWriter.class.getCanonicalName();
-            removeRegisteredMBeans("prov1", "proc1", "proc2", "w1");
-        }
+  @Test
+  public void testConfiguredProviderTimeout() {
+    try {
+      Map<String, Object> config = Maps.newHashMap();
+      int timeout = 10000;
+      config.put(LocalStreamBuilder.TIMEOUT_KEY, timeout);
+      long start = System.currentTimeMillis();
+      StreamBuilder builder = new LocalStreamBuilder(-1, config);
+      builder.newPerpetualStream("prov1", new EmptyResultSetProvider())
+          .addStreamsProcessor("proc1", new PassthroughDatumCounterProcessor("proc1"), 1, "prov1")
+          .addStreamsProcessor("proc2", new PassthroughDatumCounterProcessor("proc2"), 1, "proc1")
+          .addStreamsPersistWriter("w1", new SystemOutWriter(), 1, "proc1");
+      builder.start();
+      long end = System.currentTimeMillis();
+      //We care mostly that it doesn't terminate too early.  With thread shutdowns, etc, the actual time is indeterminate.  Just make sure there is an upper bound
+      assertThat((int) (end - start), is(allOf(greaterThanOrEqualTo(timeout), lessThanOrEqualTo(4 * timeout))));
+    } finally {
+      String provClass = "-"+NumericMessageProvider.class.getCanonicalName();
+      String procClass = "-"+PassthroughDatumCounterProcessor.class.getCanonicalName();
+      String writerClass = "-"+DatumCounterWriter.class.getCanonicalName();
+      removeRegisteredMBeans("prov1", "proc1", "proc2", "w1");
     }
+  }
 
-    @Ignore
-    @Test
-    public void ensureShutdownWithBlockedQueue() throws InterruptedException {
-        try {
-            ExecutorService service = Executors.newSingleThreadExecutor();
-            int before = Thread.activeCount();
-            final StreamBuilder builder = new LocalStreamBuilder();
-            builder.newPerpetualStream("prov1", new NumericMessageProvider(30))
-                    .addStreamsProcessor("proc1", new SlowProcessor(), 1, "prov1")
-                    .addStreamsPersistWriter("w1", new SystemOutWriter(), 1, "proc1");
-            service.submit(new Runnable() {
-                @Override
-                public void run() {
-                    builder.start();
-                }
-            });
-            //Let streams spin up threads and start to process
-            Thread.sleep(500);
-            builder.stop();
-            service.shutdownNow();
-            service.awaitTermination(30000, TimeUnit.MILLISECONDS);
-            assertThat(Thread.activeCount(), is(equalTo(before)));
-        } finally {
-            String provClass = "-"+NumericMessageProvider.class.getCanonicalName();
-            String procClass = "-"+PassthroughDatumCounterProcessor.class.getCanonicalName();
-            String writerClass = "-"+DatumCounterWriter.class.getCanonicalName();
-            removeRegisteredMBeans("prov1", "proc1", "w1");
+  @Ignore
+  @Test
+  public void ensureShutdownWithBlockedQueue() throws InterruptedException {
+    try {
+      ExecutorService service = Executors.newSingleThreadExecutor();
+      int before = Thread.activeCount();
+      final StreamBuilder builder = new LocalStreamBuilder();
+      builder.newPerpetualStream("prov1", new NumericMessageProvider(30))
+          .addStreamsProcessor("proc1", new SlowProcessor(), 1, "prov1")
+          .addStreamsPersistWriter("w1", new SystemOutWriter(), 1, "proc1");
+      service.submit(new Runnable() {
+        @Override
+        public void run() {
+          builder.start();
         }
+      });
+      //Let streams spin up threads and start to process
+      Thread.sleep(500);
+      builder.stop();
+      service.shutdownNow();
+      service.awaitTermination(30000, TimeUnit.MILLISECONDS);
+      assertThat(Thread.activeCount(), is(equalTo(before)));
+    } finally {
+      String provClass = "-"+NumericMessageProvider.class.getCanonicalName();
+      String procClass = "-"+PassthroughDatumCounterProcessor.class.getCanonicalName();
+      String writerClass = "-"+DatumCounterWriter.class.getCanonicalName();
+      removeRegisteredMBeans("prov1", "proc1", "w1");
     }
+  }
 
-    @Before
-    private void clearCounters() {
-        PassthroughDatumCounterProcessor.COUNTS.clear();
-        PassthroughDatumCounterProcessor.CLAIMED_ID.clear();
-        PassthroughDatumCounterProcessor.SEEN_DATA.clear();
-        DatumCounterWriter.COUNTS.clear();
-        DatumCounterWriter.CLAIMED_ID.clear();
-        DatumCounterWriter.SEEN_DATA.clear();
-        DatumCounterWriter.RECEIVED.clear();
-    }
+  @Before
+  private void clearCounters() {
+    PassthroughDatumCounterProcessor.COUNTS.clear();
+    PassthroughDatumCounterProcessor.CLAIMED_ID.clear();
+    PassthroughDatumCounterProcessor.SEEN_DATA.clear();
+    DatumCounterWriter.COUNTS.clear();
+    DatumCounterWriter.CLAIMED_ID.clear();
+    DatumCounterWriter.SEEN_DATA.clear();
+    DatumCounterWriter.RECEIVED.clear();
+  }
 
 
-    /**
-     * Creates {@link org.apache.streams.core.StreamsProcessor} that passes any StreamsDatum it gets as an
-     * input and counts the number of items it processes.
-     * @param counter
-     * @return
-     */
-    private StreamsProcessor createPassThroughProcessor(final AtomicInteger counter) {
-        StreamsProcessor processor = mock(StreamsProcessor.class);
-        when(processor.process(any(StreamsDatum.class))).thenAnswer(new Answer<List<StreamsDatum>>() {
-            @Override
-            public List<StreamsDatum> answer(InvocationOnMock invocationOnMock) throws Throwable {
-                List<StreamsDatum> datum = Lists.newLinkedList();
-                if(counter != null) {
-                    counter.incrementAndGet();
-                }
-                datum.add((StreamsDatum) invocationOnMock.getArguments()[0] );
-                return datum;
-            }
-        });
-        return processor;
-    }
+  /**
+   * Creates {@link org.apache.streams.core.StreamsProcessor} that passes any StreamsDatum it gets as an
+   * input and counts the number of items it processes.
+   * @param counter
+   * @return
+   */
+  private StreamsProcessor createPassThroughProcessor(final AtomicInteger counter) {
+    StreamsProcessor processor = mock(StreamsProcessor.class);
+    when(processor.process(any(StreamsDatum.class))).thenAnswer(new Answer<List<StreamsDatum>>() {
+      @Override
+      public List<StreamsDatum> answer(InvocationOnMock invocationOnMock) throws Throwable {
+        List<StreamsDatum> datum = Lists.newLinkedList();
+        if(counter != null) {
+          counter.incrementAndGet();
+        }
+        datum.add((StreamsDatum) invocationOnMock.getArguments()[0] );
+        return datum;
+      }
+    });
+    return processor;
+  }
 
-    private StreamsPersistWriter createSetCollectingWriter(final Set collector) {
-        return createSetCollectingWriter(collector, null);
-    }
+  private StreamsPersistWriter createSetCollectingWriter(final Set collector) {
+    return createSetCollectingWriter(collector, null);
+  }
 
-    /**
-     * Creates a StreamsPersistWriter that adds every datums document to a set
-     * @param collector
-     * @return
-     */
-    private StreamsPersistWriter createSetCollectingWriter(final Set collector, final AtomicInteger counter) {
-        StreamsPersistWriter writer = mock(StreamsPersistWriter.class);
-        doAnswer(new Answer() {
-            @Override
-            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
-                if(counter != null) {
-                    counter.incrementAndGet();
-                }
-                collector.add(((StreamsDatum)invocationOnMock.getArguments()[0]).getDocument());
-                return null;
-            }
-        }).when(writer).write(any(StreamsDatum.class));
-        return writer;
-    }
+  /**
+   * Creates a StreamsPersistWriter that adds every datums document to a set
+   * @param collector
+   * @return
+   */
+  private StreamsPersistWriter createSetCollectingWriter(final Set collector, final AtomicInteger counter) {
+    StreamsPersistWriter writer = mock(StreamsPersistWriter.class);
+    doAnswer(new Answer() {
+      @Override
+      public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
+        if(counter != null) {
+          counter.incrementAndGet();
+        }
+        collector.add(((StreamsDatum)invocationOnMock.getArguments()[0]).getDocument());
+        return null;
+      }
+    }).when(writer).write(any(StreamsDatum.class));
+    return writer;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/builders/ToyLocalBuilderExample.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/builders/ToyLocalBuilderExample.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/builders/ToyLocalBuilderExample.java
index a77dfec..2c61093 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/builders/ToyLocalBuilderExample.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/builders/ToyLocalBuilderExample.java
@@ -30,16 +30,16 @@ import org.apache.streams.local.test.writer.DoNothingWriter;
  */
 public class ToyLocalBuilderExample {
 
-    /**
-     * A simple example of how to run a stream in local mode.
-     * @param args
-     */
-    public static void main(String[] args) {
-        StreamBuilder builder = new LocalStreamBuilder();
-        builder.newReadCurrentStream("prov", new NumericMessageProvider(1000000))
-                .addStreamsProcessor("proc", new DoNothingProcessor(), 100, "prov")
-                .addStreamsPersistWriter("writer", new DoNothingWriter(), 3, "proc");
-        builder.start();
-    }
+  /**
+   * A simple example of how to run a stream in local mode.
+   * @param args
+   */
+  public static void main(String[] args) {
+    StreamBuilder builder = new LocalStreamBuilder();
+    builder.newReadCurrentStream("prov", new NumericMessageProvider(1000000))
+        .addStreamsProcessor("proc", new DoNothingProcessor(), 100, "prov")
+        .addStreamsPersistWriter("writer", new DoNothingWriter(), 3, "proc");
+    builder.start();
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/counters/DatumStatusCounterTest.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/counters/DatumStatusCounterTest.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/counters/DatumStatusCounterTest.java
index 9775c6f..9d92bec 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/counters/DatumStatusCounterTest.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/counters/DatumStatusCounterTest.java
@@ -23,112 +23,112 @@ import org.joda.time.DateTime;
 import org.junit.After;
 import org.junit.Test;
 
+import java.lang.management.ManagementFactory;
 import javax.management.InstanceNotFoundException;
 import javax.management.ObjectName;
-import java.lang.management.ManagementFactory;
 
 /**
  *
  */
 public class DatumStatusCounterTest extends RandomizedTest {
 
-    private static final String MBEAN_ID = "test_id";
-    private static final String STREAM_ID = "test_stream";
-    private static long STREAM_START_TIME = (new DateTime()).getMillis();
+  private static final String MBEAN_ID = "test_id";
+  private static final String STREAM_ID = "test_stream";
+  private static long STREAM_START_TIME = (new DateTime()).getMillis();
 
 
-    /**
-     * Remove registered mbeans from previous tests
-     * @throws Exception
-     */
-    @After
-    public void unregisterMXBean() throws Exception {
-        try {
-            ManagementFactory.getPlatformMBeanServer().unregisterMBean(new ObjectName(String.format(DatumStatusCounter.NAME_TEMPLATE, MBEAN_ID, STREAM_ID, STREAM_START_TIME)));
-        } catch (InstanceNotFoundException ife) {
-            //No-op
-        }
+  /**
+   * Remove registered mbeans from previous tests
+   * @throws Exception
+   */
+  @After
+  public void unregisterMXBean() throws Exception {
+    try {
+      ManagementFactory.getPlatformMBeanServer().unregisterMBean(new ObjectName(String.format(DatumStatusCounter.NAME_TEMPLATE, MBEAN_ID, STREAM_ID, STREAM_START_TIME)));
+    } catch (InstanceNotFoundException ife) {
+      //No-op
     }
+  }
 
-    /**
-     * Test Constructor can register the counter as an mxbean with throwing an exception.
-     */
-    @Test
-    public void testConstructor() {
-        try {
-            new DatumStatusCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        } catch (Throwable t) {
-            fail("Constructor Threw Exception : "+t.getMessage());
-        }
+  /**
+   * Test Constructor can register the counter as an mxbean with throwing an exception.
+   */
+  @Test
+  public void testConstructor() {
+    try {
+      new DatumStatusCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    } catch (Throwable t) {
+      fail("Constructor Threw Exception : "+t.getMessage());
     }
+  }
 
-    /**
-     * Test that you can increment passes and it returns the correct count
-     * @throws Exception
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testPassed() throws Exception {
-        DatumStatusCounter counter = new DatumStatusCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        int numIncrements = randomIntBetween(1, 100000);
-        for(int i=0; i < numIncrements; ++i) {
-            counter.incrementPassedCount();
-        }
-        assertEquals(numIncrements, counter.getNumPassed());
+  /**
+   * Test that you can increment passes and it returns the correct count
+   * @throws Exception
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testPassed() throws Exception {
+    DatumStatusCounter counter = new DatumStatusCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    int numIncrements = randomIntBetween(1, 100000);
+    for(int i=0; i < numIncrements; ++i) {
+      counter.incrementPassedCount();
+    }
+    assertEquals(numIncrements, counter.getNumPassed());
 
-        unregisterMXBean();
+    unregisterMXBean();
 
-        counter = new DatumStatusCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        numIncrements = randomIntBetween(1, 100000);
-        long total = 0;
-        for(int i=0; i < numIncrements; ++i) {
-            long delta = randomIntBetween(1, 100);
-            total += delta;
-            counter.incrementPassedCount(delta);
-        }
-        assertEquals(total, counter.getNumPassed());
+    counter = new DatumStatusCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    numIncrements = randomIntBetween(1, 100000);
+    long total = 0;
+    for(int i=0; i < numIncrements; ++i) {
+      long delta = randomIntBetween(1, 100);
+      total += delta;
+      counter.incrementPassedCount(delta);
     }
+    assertEquals(total, counter.getNumPassed());
+  }
 
-    /**
-     * Test that you can increment failed and it returns the correct count
-     * @throws Exception
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testFailed() throws Exception {
-        DatumStatusCounter counter = new DatumStatusCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        int numIncrements = randomIntBetween(1, 100000);
-        for(int i=0; i < numIncrements; ++i) {
-            counter.incrementFailedCount();
-        }
-        assertEquals(numIncrements, counter.getNumFailed());
+  /**
+   * Test that you can increment failed and it returns the correct count
+   * @throws Exception
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testFailed() throws Exception {
+    DatumStatusCounter counter = new DatumStatusCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    int numIncrements = randomIntBetween(1, 100000);
+    for(int i=0; i < numIncrements; ++i) {
+      counter.incrementFailedCount();
+    }
+    assertEquals(numIncrements, counter.getNumFailed());
 
-        unregisterMXBean();
+    unregisterMXBean();
 
-        counter = new DatumStatusCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        numIncrements = randomIntBetween(1, 100000);
-        long total = 0;
-        for(int i=0; i < numIncrements; ++i) {
-            long delta = randomIntBetween(1, 100);
-            total += delta;
-            counter.incrementFailedCount(delta);
-        }
-        assertEquals(total, counter.getNumFailed());
+    counter = new DatumStatusCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    numIncrements = randomIntBetween(1, 100000);
+    long total = 0;
+    for(int i=0; i < numIncrements; ++i) {
+      long delta = randomIntBetween(1, 100);
+      total += delta;
+      counter.incrementFailedCount(delta);
     }
+    assertEquals(total, counter.getNumFailed());
+  }
 
 
-    /**
-     * Test failure rate returns expected values
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testFailureRate() {
-        DatumStatusCounter counter = new DatumStatusCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        assertEquals(0.0, counter.getFailRate(), 0);
-        int failures = randomIntBetween(0, 100000);
-        int passes = randomIntBetween(0, 100000);
-        counter.incrementPassedCount(passes);
-        counter.incrementFailedCount(failures);
-        assertEquals((double)failures / (double)(passes + failures), counter.getFailRate(), 0);
-    }
+  /**
+   * Test failure rate returns expected values
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testFailureRate() {
+    DatumStatusCounter counter = new DatumStatusCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    assertEquals(0.0, counter.getFailRate(), 0);
+    int failures = randomIntBetween(0, 100000);
+    int passes = randomIntBetween(0, 100000);
+    counter.incrementPassedCount(passes);
+    counter.incrementFailedCount(failures);
+    assertEquals((double)failures / (double)(passes + failures), counter.getFailRate(), 0);
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/counters/StreamsTaskCounterTest.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/counters/StreamsTaskCounterTest.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/counters/StreamsTaskCounterTest.java
index 95fd610..544b065 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/counters/StreamsTaskCounterTest.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/counters/StreamsTaskCounterTest.java
@@ -23,139 +23,139 @@ import org.joda.time.DateTime;
 import org.junit.After;
 import org.junit.Test;
 
+import java.lang.management.ManagementFactory;
 import javax.management.InstanceNotFoundException;
 import javax.management.ObjectName;
-import java.lang.management.ManagementFactory;
 
 /**
  * Unit tests for {@link org.apache.streams.local.counters.StreamsTaskCounter}
  */
 public class StreamsTaskCounterTest extends RandomizedTest {
 
-    private static final String MBEAN_ID = "test_id";
-    private static final String STREAM_ID = "test_stream";
-    private static long STREAM_START_TIME = (new DateTime()).getMillis();
-
-    /**
-     * Remove registered mbeans from previous tests
-     * @throws Exception
-     */
-    @After
-    public void unregisterMXBean() throws Exception {
-        try {
-            ManagementFactory.getPlatformMBeanServer().unregisterMBean(new ObjectName(String.format(StreamsTaskCounter.NAME_TEMPLATE, MBEAN_ID, STREAM_ID, STREAM_START_TIME)));
-        } catch (InstanceNotFoundException ife) {
-            //No-op
-        }
+  private static final String MBEAN_ID = "test_id";
+  private static final String STREAM_ID = "test_stream";
+  private static long STREAM_START_TIME = (new DateTime()).getMillis();
+
+  /**
+   * Remove registered mbeans from previous tests
+   * @throws Exception
+   */
+  @After
+  public void unregisterMXBean() throws Exception {
+    try {
+      ManagementFactory.getPlatformMBeanServer().unregisterMBean(new ObjectName(String.format(StreamsTaskCounter.NAME_TEMPLATE, MBEAN_ID, STREAM_ID, STREAM_START_TIME)));
+    } catch (InstanceNotFoundException ife) {
+      //No-op
     }
-
-    /**
-     * Test constructor does not throw errors
-     */
-    @Test
-    public void testConstructor() {
-        try {
-            new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        } catch (Throwable t) {
-            fail("Constructor threw error : "+t.getMessage());
-        }
+  }
+
+  /**
+   * Test constructor does not throw errors
+   */
+  @Test
+  public void testConstructor() {
+    try {
+      new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    } catch (Throwable t) {
+      fail("Constructor threw error : "+t.getMessage());
     }
-
-    /**
-     * Test emitted increments correctly and returns expected value
-     * @throws Exception
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testEmitted() throws Exception {
-        StreamsTaskCounter counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        int numIncrements = randomIntBetween(1, 100000);
-        for(int i=0; i < numIncrements; ++i) {
-            counter.incrementEmittedCount();
-        }
-        assertEquals(numIncrements, counter.getNumEmitted());
-
-        unregisterMXBean();
-
-        counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        numIncrements = randomIntBetween(1, 100000);
-        long total = 0;
-        for(int i=0; i < numIncrements; ++i) {
-            long delta = randomIntBetween(1, 100);
-            total += delta;
-            counter.incrementEmittedCount(delta);
-        }
-        assertEquals(total, counter.getNumEmitted());
+  }
+
+  /**
+   * Test emitted increments correctly and returns expected value
+   * @throws Exception
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testEmitted() throws Exception {
+    StreamsTaskCounter counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    int numIncrements = randomIntBetween(1, 100000);
+    for(int i=0; i < numIncrements; ++i) {
+      counter.incrementEmittedCount();
     }
+    assertEquals(numIncrements, counter.getNumEmitted());
+
+    unregisterMXBean();
 
-    /**
-     * Test received increments correctly and returns expected value
-     * @throws Exception
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testReceived() throws Exception {
-        StreamsTaskCounter counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        int numIncrements = randomIntBetween(1, 100000);
-        for(int i=0; i < numIncrements; ++i) {
-            counter.incrementReceivedCount();
-        }
-        assertEquals(numIncrements, counter.getNumReceived());
-
-        unregisterMXBean();
-
-        counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        numIncrements = randomIntBetween(1, 100000);
-        long total = 0;
-        for(int i=0; i < numIncrements; ++i) {
-            long delta = randomIntBetween(1, 100);
-            total += delta;
-            counter.incrementReceivedCount(delta);
-        }
-        assertEquals(total, counter.getNumReceived());
+    counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    numIncrements = randomIntBetween(1, 100000);
+    long total = 0;
+    for(int i=0; i < numIncrements; ++i) {
+      long delta = randomIntBetween(1, 100);
+      total += delta;
+      counter.incrementEmittedCount(delta);
     }
+    assertEquals(total, counter.getNumEmitted());
+  }
+
+  /**
+   * Test received increments correctly and returns expected value
+   * @throws Exception
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testReceived() throws Exception {
+    StreamsTaskCounter counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    int numIncrements = randomIntBetween(1, 100000);
+    for(int i=0; i < numIncrements; ++i) {
+      counter.incrementReceivedCount();
+    }
+    assertEquals(numIncrements, counter.getNumReceived());
+
+    unregisterMXBean();
 
-    /**
-     * Test errors increments correctly and returns expected value
-     * @throws Exception
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testError() throws Exception {
-        StreamsTaskCounter counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        int numIncrements = randomIntBetween(1, 100000);
-        for(int i=0; i < numIncrements; ++i) {
-            counter.incrementErrorCount();
-        }
-        assertEquals(numIncrements, counter.getNumUnhandledErrors());
-
-        unregisterMXBean();
-
-        counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        numIncrements = randomIntBetween(1, 100000);
-        long total = 0;
-        for(int i=0; i < numIncrements; ++i) {
-            long delta = randomIntBetween(1, 100);
-            total += delta;
-            counter.incrementErrorCount(delta);
-        }
-        assertEquals(total, counter.getNumUnhandledErrors());
+    counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    numIncrements = randomIntBetween(1, 100000);
+    long total = 0;
+    for(int i=0; i < numIncrements; ++i) {
+      long delta = randomIntBetween(1, 100);
+      total += delta;
+      counter.incrementReceivedCount(delta);
     }
+    assertEquals(total, counter.getNumReceived());
+  }
+
+  /**
+   * Test errors increments correctly and returns expected value
+   * @throws Exception
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testError() throws Exception {
+    StreamsTaskCounter counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    int numIncrements = randomIntBetween(1, 100000);
+    for(int i=0; i < numIncrements; ++i) {
+      counter.incrementErrorCount();
+    }
+    assertEquals(numIncrements, counter.getNumUnhandledErrors());
+
+    unregisterMXBean();
 
-    /**
-     * Test error rate returns expected value
-     * @throws Exception
-     */
-    @Test
-    @Repeat(iterations = 3)
-    public void testErrorRate() throws Exception {
-        StreamsTaskCounter counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
-        assertEquals(0.0, counter.getErrorRate(), 0);
-        int failures = randomIntBetween(0, 100000);
-        int received = randomIntBetween(0, 100000);
-        counter.incrementReceivedCount(received);
-        counter.incrementErrorCount(failures);
-        assertEquals((double)failures / (double)(received), counter.getErrorRate(), 0);
+    counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    numIncrements = randomIntBetween(1, 100000);
+    long total = 0;
+    for(int i=0; i < numIncrements; ++i) {
+      long delta = randomIntBetween(1, 100);
+      total += delta;
+      counter.incrementErrorCount(delta);
     }
+    assertEquals(total, counter.getNumUnhandledErrors());
+  }
+
+  /**
+   * Test error rate returns expected value
+   * @throws Exception
+   */
+  @Test
+  @Repeat(iterations = 3)
+  public void testErrorRate() throws Exception {
+    StreamsTaskCounter counter = new StreamsTaskCounter(MBEAN_ID, STREAM_ID, STREAM_START_TIME);
+    assertEquals(0.0, counter.getErrorRate(), 0);
+    int failures = randomIntBetween(0, 100000);
+    int received = randomIntBetween(0, 100000);
+    counter.incrementReceivedCount(received);
+    counter.incrementErrorCount(failures);
+    assertEquals((double)failures / (double)(received), counter.getErrorRate(), 0);
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/executors/ShutdownStreamOnUnhandledThrowableThreadPoolExecutorTest.java
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/executors/ShutdownStreamOnUnhandledThrowableThreadPoolExecutorTest.java b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/executors/ShutdownStreamOnUnhandledThrowableThreadPoolExecutorTest.java
index 7e33ab9..e3b608d 100644
--- a/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/executors/ShutdownStreamOnUnhandledThrowableThreadPoolExecutorTest.java
+++ b/streams-runtimes/streams-runtime-local/src/test/java/org/apache/streams/local/executors/ShutdownStreamOnUnhandledThrowableThreadPoolExecutorTest.java
@@ -20,6 +20,7 @@ package org.apache.streams.local.executors;
 
 import org.apache.streams.local.builders.LocalStreamBuilder;
 import org.apache.streams.util.ComponentUtils;
+
 import org.junit.After;
 import org.junit.Test;
 import org.mockito.invocation.InvocationOnMock;
@@ -27,7 +28,6 @@ import org.mockito.stubbing.Answer;
 
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 
@@ -35,7 +35,6 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 /**
  *
@@ -43,90 +42,90 @@ import static org.mockito.Mockito.when;
 public class ShutdownStreamOnUnhandledThrowableThreadPoolExecutorTest {
 
 
-    @After
-    public void removeLocalMBeans() {
-        try {
-            ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
-        } catch (Exception e) {
-            //No op.  proceed to next test
-        }
+  @After
+  public void removeLocalMBeans() {
+    try {
+      ComponentUtils.removeAllMBeansOfDomain("org.apache.streams.local");
+    } catch (Exception e) {
+      //No op.  proceed to next test
     }
-
-    @Test
-    public void testShutDownOnException() {
-        LocalStreamBuilder sb = mock(LocalStreamBuilder.class);
-        final AtomicBoolean isShutdown = new AtomicBoolean(false);
-        doAnswer(new Answer() {
-            @Override
-            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
-                isShutdown.set(true);
-                return null;
-            }
-        }).when(sb).stop();
-
-        final CountDownLatch latch = new CountDownLatch(1);
-
-        Runnable runnable = new Runnable() {
-            @Override
-            public void run() {
-                latch.countDown();
-                throw new RuntimeException("Testing Throwable Handling!");
-            }
-        };
-
-        ExecutorService executor = new ShutdownStreamOnUnhandleThrowableThreadPoolExecutor(1, sb);
-        executor.execute(runnable);
-        try {
-            latch.await();
-        } catch (InterruptedException ie) {
-            Thread.currentThread().interrupt();
-        }
-        executor.shutdownNow();
-        try {
-            executor.awaitTermination(1, TimeUnit.SECONDS);
-        } catch (InterruptedException ie) {
-            Thread.currentThread().interrupt();
-        }
-        assertTrue("Expected StreamBuilder shutdown to be called", isShutdown.get());
+  }
+
+  @Test
+  public void testShutDownOnException() {
+    LocalStreamBuilder sb = mock(LocalStreamBuilder.class);
+    final AtomicBoolean isShutdown = new AtomicBoolean(false);
+    doAnswer(new Answer() {
+      @Override
+      public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
+        isShutdown.set(true);
+        return null;
+      }
+    }).when(sb).stop();
+
+    final CountDownLatch latch = new CountDownLatch(1);
+
+    Runnable runnable = new Runnable() {
+      @Override
+      public void run() {
+        latch.countDown();
+        throw new RuntimeException("Testing Throwable Handling!");
+      }
+    };
+
+    ExecutorService executor = new ShutdownStreamOnUnhandleThrowableThreadPoolExecutor(1, sb);
+    executor.execute(runnable);
+    try {
+      latch.await();
+    } catch (InterruptedException ie) {
+      Thread.currentThread().interrupt();
     }
-
-
-    @Test
-    public void testNormalExecution() {
-        LocalStreamBuilder sb = mock(LocalStreamBuilder.class);
-        final AtomicBoolean isShutdown = new AtomicBoolean(false);
-        doAnswer(new Answer() {
-            @Override
-            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
-                isShutdown.set(true);
-                return null;
-            }
-        }).when(sb).stop();
-
-        final CountDownLatch latch = new CountDownLatch(1);
-
-        Runnable runnable = new Runnable() {
-            @Override
-            public void run() {
-                latch.countDown();
-            }
-        };
-
-        ExecutorService executor = new ShutdownStreamOnUnhandleThrowableThreadPoolExecutor(1, sb);
-        executor.execute(runnable);
-        try {
-            latch.await();
-        } catch (InterruptedException ie) {
-            Thread.currentThread().interrupt();
-        }
-        executor.shutdownNow();
-        try {
-            executor.awaitTermination(1, TimeUnit.SECONDS);
-        } catch (InterruptedException ie) {
-            Thread.currentThread().interrupt();
-        }
-        assertFalse("Expected StreamBuilder shutdown to be called", isShutdown.get());
+    executor.shutdownNow();
+    try {
+      executor.awaitTermination(1, TimeUnit.SECONDS);
+    } catch (InterruptedException ie) {
+      Thread.currentThread().interrupt();
+    }
+    assertTrue("Expected StreamBuilder shutdown to be called", isShutdown.get());
+  }
+
+
+  @Test
+  public void testNormalExecution() {
+    LocalStreamBuilder sb = mock(LocalStreamBuilder.class);
+    final AtomicBoolean isShutdown = new AtomicBoolean(false);
+    doAnswer(new Answer() {
+      @Override
+      public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
+        isShutdown.set(true);
+        return null;
+      }
+    }).when(sb).stop();
+
+    final CountDownLatch latch = new CountDownLatch(1);
+
+    Runnable runnable = new Runnable() {
+      @Override
+      public void run() {
+        latch.countDown();
+      }
+    };
+
+    ExecutorService executor = new ShutdownStreamOnUnhandleThrowableThreadPoolExecutor(1, sb);
+    executor.execute(runnable);
+    try {
+      latch.await();
+    } catch (InterruptedException ie) {
+      Thread.currentThread().interrupt();
+    }
+    executor.shutdownNow();
+    try {
+      executor.awaitTermination(1, TimeUnit.SECONDS);
+    } catch (InterruptedException ie) {
+      Thread.currentThread().interrupt();
     }
+    assertFalse("Expected StreamBuilder shutdown to be called", isShutdown.get());
+  }
 
 
 }


[03/42] incubator-streams git commit: STREAMS-440: custom checkstyle.xml, address compliance

Posted by sb...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-runtimes/streams-runtime-storm/pom.xml
----------------------------------------------------------------------
diff --git a/streams-runtimes/streams-runtime-storm/pom.xml b/streams-runtimes/streams-runtime-storm/pom.xml
deleted file mode 100644
index ded3efc..0000000
--- a/streams-runtimes/streams-runtime-storm/pom.xml
+++ /dev/null
@@ -1,124 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~
-  ~   http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied.  See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <artifactId>streams-runtimes</artifactId>
-        <groupId>org.apache.streams</groupId>
-        <version>0.4-incubating-SNAPSHOT</version>
-    </parent>
-    <artifactId>streams-runtime-storm</artifactId>
-    <name>${project.artifactId}</name>
-    <description>Apache Streams Runtimes</description>
-
-    <properties>
-        <storm.version>0.9.1-incubating</storm.version>
-        <scala.version>2.9.2</scala.version>
-        <zkclient.version>0.4</zkclient.version>
-    </properties>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.streams</groupId>
-            <artifactId>streams-config</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.streams</groupId>
-            <artifactId>streams-core</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.streams</groupId>
-            <artifactId>streams-util</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-collections4</artifactId>
-            <version>4.0</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.storm</groupId>
-            <artifactId>storm-core</artifactId>
-            <version>${storm.version}</version>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>commons-logging</groupId>
-                    <artifactId>commons-logging</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>jcl-over-slf4j</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-            <version>${scala.version}</version>
-            <scope>compile</scope>
-            <type>jar</type>
-        </dependency>
-        <dependency>
-            <groupId>com.101tec</groupId>
-            <artifactId>zkclient</artifactId>
-            <version>${zkclient.version}</version>
-            <scope>compile</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>log4j</groupId>
-                    <artifactId>log4j</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-log4j12</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.streams</groupId>
-            <artifactId>streams-testing</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-            <type>test-jar</type>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <sourceDirectory>src/main/java</sourceDirectory>
-        <testSourceDirectory>src/test/java</testSourceDirectory>
-        <resources>
-            <resource>
-                <directory>src/main/resources</directory>
-            </resource>
-        </resources>
-        <testResources>
-            <testResource>
-                <directory>src/test/resources</directory>
-            </testResource>
-        </testResources>
-    </build>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-schemas/streams-schema-activitystreams/src/test/java/org/w3c/activitystreams/test/SchemaValidationTest.java
----------------------------------------------------------------------
diff --git a/streams-schemas/streams-schema-activitystreams/src/test/java/org/w3c/activitystreams/test/SchemaValidationTest.java b/streams-schemas/streams-schema-activitystreams/src/test/java/org/w3c/activitystreams/test/SchemaValidationTest.java
index 8f22450..6344c3c 100644
--- a/streams-schemas/streams-schema-activitystreams/src/test/java/org/w3c/activitystreams/test/SchemaValidationTest.java
+++ b/streams-schemas/streams-schema-activitystreams/src/test/java/org/w3c/activitystreams/test/SchemaValidationTest.java
@@ -40,47 +40,50 @@ import java.util.Set;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.core.Is.is;
 
+/**
+ * Test validity of documents vs schemas.
+ */
 public class SchemaValidationTest {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(SchemaValidationTest.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(SchemaValidationTest.class);
 
-    private final static ObjectMapper MAPPER = new ObjectMapper();
+  private static final ObjectMapper MAPPER = new ObjectMapper();
 
-    /**
-     * Tests that activities matching core-ex* can be parsed by apache streams
-     *
-     * @throws Exception
-     */
-    @Test
-    public void validateToSchema() throws Exception {
+  /**
+   * Tests that activities matching core-ex* can be parsed by apache streams.
+   *
+   * @throws Exception Test Exception
+   */
+  @Test
+  public void testValidateToSchema() throws Exception {
 
-        JsonSchemaFactory factory = new JsonSchemaFactory();
+    JsonSchemaFactory factory = new JsonSchemaFactory();
 
-        InputStream testActivityFolderStream = SchemaValidationTest.class.getClassLoader()
-                .getResourceAsStream("activities");
-        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
+    InputStream testActivityFolderStream = SchemaValidationTest.class.getClassLoader()
+        .getResourceAsStream("activities");
+    List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
 
-        for (String file : files) {
-            if( !file.startsWith(".") ) {
+    for (String file : files) {
+      if ( !file.startsWith(".") ) {
 
-                LOGGER.info("Test File: activities/" + file);
-                String testFileString = new String(Files.readAllBytes(Paths.get("target/test-classes/activities/" + file)));
-                LOGGER.info("Test Document JSON: " + testFileString);
-                JsonNode testNode = MAPPER.readValue(testFileString, ObjectNode.class);
-                LOGGER.info("Test Document Object:" + testNode);
-                LOGGER.info("Test Schema File: " + "target/classes/verbs/" + file);
-                String testSchemaString = new String(Files.readAllBytes(Paths.get("target/classes/verbs/" + file)));
-                LOGGER.info("Test Schema JSON: " + testSchemaString);
-                JsonNode testSchemaNode = MAPPER.readValue(testFileString, ObjectNode.class);
-                LOGGER.info("Test Schema Object:" + testSchemaNode);
-                JsonSchema testSchema = factory.getSchema(testSchemaNode);
-                LOGGER.info("Test Schema:" + testSchema);
+        LOGGER.info("Test File: activities/" + file);
+        String testFileString = new String(Files.readAllBytes(Paths.get("target/test-classes/activities/" + file)));
+        LOGGER.info("Test Document JSON: " + testFileString);
+        JsonNode testNode = MAPPER.readValue(testFileString, ObjectNode.class);
+        LOGGER.info("Test Document Object:" + testNode);
+        LOGGER.info("Test Schema File: " + "target/classes/verbs/" + file);
+        String testSchemaString = new String(Files.readAllBytes(Paths.get("target/classes/verbs/" + file)));
+        LOGGER.info("Test Schema JSON: " + testSchemaString);
+        JsonNode testSchemaNode = MAPPER.readValue(testFileString, ObjectNode.class);
+        LOGGER.info("Test Schema Object:" + testSchemaNode);
+        JsonSchema testSchema = factory.getSchema(testSchemaNode);
+        LOGGER.info("Test Schema:" + testSchema);
 
-                Set<ValidationMessage> errors = testSchema.validate(testNode);
-                assertThat(errors.size(), is(0));
+        Set<ValidationMessage> errors = testSchema.validate(testNode);
+        assertThat(errors.size(), is(0));
 
-            }
-        }
+      }
     }
+  }
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-schemas/streams-schema-activitystreams2/src/test/java/org/w3c/activitystreams/test/ExamplesSerDeIT.java
----------------------------------------------------------------------
diff --git a/streams-schemas/streams-schema-activitystreams2/src/test/java/org/w3c/activitystreams/test/ExamplesSerDeIT.java b/streams-schemas/streams-schema-activitystreams2/src/test/java/org/w3c/activitystreams/test/ExamplesSerDeIT.java
index b1b5824..8500efd 100644
--- a/streams-schemas/streams-schema-activitystreams2/src/test/java/org/w3c/activitystreams/test/ExamplesSerDeIT.java
+++ b/streams-schemas/streams-schema-activitystreams2/src/test/java/org/w3c/activitystreams/test/ExamplesSerDeIT.java
@@ -33,103 +33,106 @@ import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.List;
 
+/**
+ * Tests that activities matching core-ex* can be parsed by apache streams.
+ */
 public class ExamplesSerDeIT {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(ExamplesSerDeIT.class);
-
-    private final static ObjectMapper MAPPER = new ObjectMapper();
-
-    /**
-     * Tests that activities matching core-ex* can be parsed by apache streams
-     *
-     * @throws Exception
-     */
-    @Test
-    public void testCoreSerDe() throws Exception {
-
-        InputStream testActivityFolderStream = ExamplesSerDeIT.class.getClassLoader()
-                .getResourceAsStream("w3c/activitystreams-master/test");
-        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
-
-        for (String file : files) {
-            if( !file.startsWith(".") && file.contains("core-ex") ) {
-                LOGGER.info("File: activitystreams-master/test/" + file);
-                String testFileString = new String(Files.readAllBytes(Paths.get("target/test-classes/w3c/activitystreams-master/test/" + file)));
-                LOGGER.info("Content: " + testFileString);
-                ObjectNode testFileObjectNode = MAPPER.readValue(testFileString, ObjectNode.class);
-                LOGGER.info("Object:" + testFileObjectNode);
-            }
-        }
+  private static final Logger LOGGER = LoggerFactory.getLogger(ExamplesSerDeIT.class);
+
+  private static final ObjectMapper MAPPER = new ObjectMapper();
+
+  /**
+   * Tests that activities matching core-ex* can be parsed by apache streams.
+   *
+   * @throws Exception test exception
+   */
+  @Test
+  public void testCoreSerDe() throws Exception {
+
+    InputStream testActivityFolderStream = ExamplesSerDeIT.class.getClassLoader()
+        .getResourceAsStream("w3c/activitystreams-master/test");
+    List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
+
+    for (String file : files) {
+      if ( !file.startsWith(".") && file.contains("core-ex") ) {
+        LOGGER.info("File: activitystreams-master/test/" + file);
+        String testFileString = new String(Files.readAllBytes(Paths.get("target/test-classes/w3c/activitystreams-master/test/" + file)));
+        LOGGER.info("Content: " + testFileString);
+        ObjectNode testFileObjectNode = MAPPER.readValue(testFileString, ObjectNode.class);
+        LOGGER.info("Object:" + testFileObjectNode);
+      }
     }
-
-    /**
-     * Tests that activities matching simple* can be parsed by apache streams
-     *
-     * @throws Exception
-     */
-    @Test
-    public void testSimpleSerDe() throws Exception {
-
-        InputStream testActivityFolderStream = ExamplesSerDeIT.class.getClassLoader()
-                .getResourceAsStream("w3c/activitystreams-master/test");
-        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
-
-        for (String file : files) {
-            if( !file.startsWith(".") && file.contains("simple") ) {
-                LOGGER.info("File: activitystreams-master/test/" + file);
-                String testFileString = new String(Files.readAllBytes(Paths.get("target/test-classes/w3c/activitystreams-master/test/" + file)));
-                LOGGER.info("Content: " + testFileString);
-                ObjectNode testFileObjectNode = MAPPER.readValue(testFileString, ObjectNode.class);
-                LOGGER.info("Object:" + testFileObjectNode);
-            }
-        }
+  }
+
+  /**
+   * Tests that activities matching simple* can be parsed by apache streams.
+   *
+   * @throws Exception test exception
+   */
+  @Test
+  public void testSimpleSerDe() throws Exception {
+
+    InputStream testActivityFolderStream = ExamplesSerDeIT.class.getClassLoader()
+        .getResourceAsStream("w3c/activitystreams-master/test");
+    List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
+
+    for (String file : files) {
+      if ( !file.startsWith(".") && file.contains("simple") ) {
+        LOGGER.info("File: activitystreams-master/test/" + file);
+        String testFileString = new String(Files.readAllBytes(Paths.get("target/test-classes/w3c/activitystreams-master/test/" + file)));
+        LOGGER.info("Content: " + testFileString);
+        ObjectNode testFileObjectNode = MAPPER.readValue(testFileString, ObjectNode.class);
+        LOGGER.info("Object:" + testFileObjectNode);
+      }
     }
-
-    /**
-     * Tests that activities matching vocabulary-ex* can be parsed by apache streams
-     *
-     * @throws Exception
-     */
-    @Ignore
-    @Test
-    public void testVocabularySerDe() throws Exception {
-
-        InputStream testActivityFolderStream = ExamplesSerDeIT.class.getClassLoader()
-                .getResourceAsStream("w3c/activitystreams-master/test");
-        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
-
-        for (String file : files) {
-            if( !file.startsWith(".") && file.contains("vocabulary-ex") ) {
-                LOGGER.info("File: activitystreams-master/test/" + file);
-                String testFileString = new String(Files.readAllBytes(Paths.get("target/test-classes/w3c/activitystreams-master/test/" + file)));
-                LOGGER.info("Content: " + testFileString);
-                ObjectNode testFileObjectNode = MAPPER.readValue(testFileString, ObjectNode.class);
-                LOGGER.info("Object:" + testFileObjectNode);
-            }
-        }
+  }
+
+  /**
+   * Tests that activities matching vocabulary-ex* can be parsed by apache streams.
+   *
+   * @throws Exception test exception
+   */
+  @Ignore
+  @Test
+  public void testVocabularySerDe() throws Exception {
+
+    InputStream testActivityFolderStream = ExamplesSerDeIT.class.getClassLoader()
+        .getResourceAsStream("w3c/activitystreams-master/test");
+    List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
+
+    for (String file : files) {
+      if ( !file.startsWith(".") && file.contains("vocabulary-ex") ) {
+        LOGGER.info("File: activitystreams-master/test/" + file);
+        String testFileString = new String(Files.readAllBytes(Paths.get("target/test-classes/w3c/activitystreams-master/test/" + file)));
+        LOGGER.info("Content: " + testFileString);
+        ObjectNode testFileObjectNode = MAPPER.readValue(testFileString, ObjectNode.class);
+        LOGGER.info("Object:" + testFileObjectNode);
+      }
     }
-
-    /**
-     * Tests that activities expect to fail cannot be parsed by apache streams
-     *
-     * @throws Exception
-     */
-    @Ignore
-    @Test
-    public void testFailSerDe() throws Exception {
-
-        InputStream testActivityFolderStream = ExamplesSerDeIT.class.getClassLoader()
-                .getResourceAsStream("w3c/activitystreams-master/test/fail");
-        List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
-
-        for (String file : files) {
-            if( !file.startsWith(".") && file.contains("vocabulary-ex") ) {
-                LOGGER.info("File: activitystreams-master/test/fail/" + file);
-                String testFileString = new String(Files.readAllBytes(Paths.get("target/test-classes/w3c/activitystreams-master/test/" + file)));
-                LOGGER.info("Content: " + testFileString);
-                ObjectNode testFileObjectNode = MAPPER.readValue(testFileString, ObjectNode.class);
-                LOGGER.info("Object:" + testFileObjectNode);
-            }
-        }
+  }
+
+  /**
+   * Tests that activities expect to fail cannot be parsed by apache streams.
+   *
+   * @throws Exception test exception
+   */
+  @Ignore
+  @Test
+  public void testFailSerDe() throws Exception {
+
+    InputStream testActivityFolderStream = ExamplesSerDeIT.class.getClassLoader()
+        .getResourceAsStream("w3c/activitystreams-master/test/fail");
+    List<String> files = IOUtils.readLines(testActivityFolderStream, Charsets.UTF_8);
+
+    for (String file : files) {
+      if ( !file.startsWith(".") && file.contains("vocabulary-ex") ) {
+        LOGGER.info("File: activitystreams-master/test/fail/" + file);
+        String testFileString = new String(Files.readAllBytes(Paths.get("target/test-classes/w3c/activitystreams-master/test/" + file)));
+        LOGGER.info("Content: " + testFileString);
+        ObjectNode testFileObjectNode = MAPPER.readValue(testFileString, ObjectNode.class);
+        LOGGER.info("Object:" + testFileObjectNode);
+      }
     }
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/ComponentUtils.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/ComponentUtils.java b/streams-util/src/main/java/org/apache/streams/util/ComponentUtils.java
index 6037f28..514c851 100644
--- a/streams-util/src/main/java/org/apache/streams/util/ComponentUtils.java
+++ b/streams-util/src/main/java/org/apache/streams/util/ComponentUtils.java
@@ -15,126 +15,116 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.util;
 
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.management.*;
 import java.lang.management.ManagementFactory;
 import java.util.Queue;
 import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeUnit;
+import javax.management.InstanceAlreadyExistsException;
+import javax.management.MBeanRegistrationException;
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.NotCompliantMBeanException;
+import javax.management.ObjectName;
 
 /**
  * Common utilities for Streams components.
  */
 public class ComponentUtils {
-    private static final Logger LOGGER = LoggerFactory.getLogger(ComponentUtils.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(ComponentUtils.class);
 
-    /**
-     * Certain types of queues will fail to {@link java.util.Queue#offer(Object)} an item due to many factors
-     * depending on the type of queue. <code>offerUntilSuccess</code> will not return until the item has been
-     * successfully queued onto the desired queue
-     * @param entry item to queue
-     * @param queue queue to add the entry to
-     * @param <T>
-     */
-    public static <T> void offerUntilSuccess(T entry, Queue<T> queue) {
-        boolean success;
-        do {
-            success = queue.offer(entry);
-            Thread.yield();
-        }
-        while( !success );
+  /**
+   * Certain types of queues will fail to {@link java.util.Queue#offer(Object)} an item due to many factors
+   * depending on the type of queue. <code>offerUntilSuccess</code> will not return until the item has been
+   * successfully queued onto the desired queue
+   * @param entry item to queue
+   * @param queue queue to add the entry to
+   * @param <T> type
+   */
+  public static <T> void offerUntilSuccess(T entry, Queue<T> queue) {
+    boolean success;
+    do {
+      success = queue.offer(entry);
+      Thread.yield();
     }
+    while ( !success );
+  }
 
-    /**
-     * Certain types of queues will return null when calling {@link java.util.Queue#poll()} due to many factors depending
-     * on the type of queue.  <code>pollWhileNotEmpty</code> will poll the queue until an item from the queue is returned
-     * or the queue is empty.  If the queue is empty it will return NULL.
-     * @param queue
-     * @param <T>
-     * @return
-     */
-    public static <T> T pollWhileNotEmpty(Queue<T> queue) {
-        T item = queue.poll();
-        while(!queue.isEmpty() && item == null) {
-            Thread.yield();
-            item = queue.poll();
-        }
-        return item;
+  /**
+   * Certain types of queues will return null when calling {@link java.util.Queue#poll()} due to many factors depending
+   * on the type of queue.  <code>pollWhileNotEmpty</code> will poll the queue until an item from the queue is returned
+   * or the queue is empty.  If the queue is empty it will return NULL.
+   * @param queue queue to read the entry from
+   * @param <T> type
+   * @return result
+   */
+  public static <T> T pollWhileNotEmpty(Queue<T> queue) {
+    T item = queue.poll();
+    while (!queue.isEmpty() && item == null) {
+      Thread.yield();
+      item = queue.poll();
     }
+    return item;
+  }
 
-
-    public static String pollUntilStringNotEmpty(Queue queue) {
-
-        String result = null;
-        do {
-            synchronized( ComponentUtils.class ) {
-                try {
-                    result = (String) queue.remove();
-                } catch( Exception e ) {}
-            }
-            Thread.yield();
+  /**
+   * Attempts to safely {@link java.util.concurrent.ExecutorService#shutdown()}
+   * and {@link java.util.concurrent.ExecutorService#awaitTermination(long, java.util.concurrent.TimeUnit)}
+   * of an {@link java.util.concurrent.ExecutorService}.
+   * @param stream service to be shutdown
+   * @param initialWait time in seconds to wait for currently running threads to finish execution
+   * @param secondaryWait time in seconds to wait for running threads that did not terminate to acknowledge their forced termination
+   */
+  public static void shutdownExecutor(ExecutorService stream, int initialWait, int secondaryWait) {
+    stream.shutdown();
+    try {
+      if (!stream.awaitTermination(initialWait, TimeUnit.SECONDS)) {
+        stream.shutdownNow();
+        if (!stream.awaitTermination(secondaryWait, TimeUnit.SECONDS)) {
+          LOGGER.error("Executor Service did not terminate");
         }
-        while( result == null && !StringUtils.isNotEmpty(result) );
-
-        return result;
+      }
+    } catch (InterruptedException ie) {
+      stream.shutdownNow();
+      Thread.currentThread().interrupt();
     }
+  }
 
-    /**
-     * Attempts to safely {@link java.util.concurrent.ExecutorService#shutdown()} and {@link java.util.concurrent.ExecutorService#awaitTermination(long, java.util.concurrent.TimeUnit)}
-     * of an {@link java.util.concurrent.ExecutorService}.
-     * @param stream service to be shutdown
-     * @param initialWait time in seconds to wait for currently running threads to finish execution
-     * @param secondaryWait time in seconds to wait for running threads that did not terminate in the first wait to acknowledge their forced termination
-     */
-    public static void shutdownExecutor(ExecutorService stream, int initialWait, int secondaryWait) {
-        stream.shutdown();
-        try {
-            if (!stream.awaitTermination(initialWait, TimeUnit.SECONDS)) {
-                stream.shutdownNow();
-                if (!stream.awaitTermination(secondaryWait, TimeUnit.SECONDS)) {
-                    LOGGER.error("Executor Service did not terminate");
-                }
-            }
-        } catch (InterruptedException ie) {
-            stream.shutdownNow();
-            Thread.currentThread().interrupt();
-        }
+  /**
+   * Removes all mbeans registered undered a specific domain.  Made specificly to clean up at unit tests
+   * @param domain mbean domain
+   */
+  public static void removeAllMBeansOfDomain(String domain) throws Exception {
+    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+    domain = domain.endsWith(":") ? domain : domain + ":";
+    ObjectName objectName = new ObjectName(domain + "*");
+    Set<ObjectName> mbeanNames = mbs.queryNames(objectName, null);
+    for (ObjectName name : mbeanNames) {
+      mbs.unregisterMBean(name);
     }
+  }
 
-    /**
-     * Removes all mbeans registered undered a specific domain.  Made specificly to clean up at unit tests
-     * @param domain
-     */
-    public static void removeAllMBeansOfDomain(String domain) throws Exception {
-        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
-        domain = domain.endsWith(":") ? domain : domain+":";
-        ObjectName objectName = new ObjectName(domain+"*");
-        Set<ObjectName> mbeanNames = mbs.queryNames(objectName, null);
-        for(ObjectName name : mbeanNames) {
-            mbs.unregisterMBean(name);
-        }
-    }
-
-    /**
-     * Attempts to register an object with local MBeanServer.  Throws runtime exception on errors.
-     * @param name name to register bean with
-     * @param mbean mbean to register
-     */
-    public static <V> void registerLocalMBean(String name, V mbean) {
-        try {
-            ObjectName objectName = new ObjectName(name);
-            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
-            mbs.registerMBean(mbean, objectName);
-        } catch (MalformedObjectNameException | InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException e) {
-            LOGGER.error("Failed to register MXBean : {}", e);
-            throw new RuntimeException(e);
-        }
+  /**
+   * Attempts to register an object with local MBeanServer.  Throws runtime exception on errors.
+   * @param name name to register bean with
+   * @param mbean mbean to register
+   */
+  public static <V> void registerLocalMBean(String name, V mbean) {
+    try {
+      ObjectName objectName = new ObjectName(name);
+      MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+      mbs.registerMBean(mbean, objectName);
+    } catch (MalformedObjectNameException | InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException ex) {
+      LOGGER.error("Failed to register MXBean : {}", ex);
+      throw new RuntimeException(ex);
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/DateUtil.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/DateUtil.java b/streams-util/src/main/java/org/apache/streams/util/DateUtil.java
deleted file mode 100644
index 7bbb8e9..0000000
--- a/streams-util/src/main/java/org/apache/streams/util/DateUtil.java
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.streams.util;
-
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormatter;
-import org.joda.time.format.ISODateTimeFormat;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.*;
-
-
-/*
- *
- * If you can think of a better way, feel free to implement. This was a great class that I found that
- * solves the majority of the issue I was dealing with.
- *
- * smashew 11=13=2012
- *
- * Site:
- * http://stackoverflow.com/questions/3389348/parse-any-date-in-java
- */
-
-public class DateUtil
-{
-
-    private static final String REGEX_ONLY_NUMBERS = "[0-9]+";
-
-	private static final Map<String, String> DATE_FORMAT_REGEXPS = new HashMap<String, String>()
-	{
-		private static final long serialVersionUID = 1L;
-		{
-			put("^\\d{8}$", "yyyyMMdd");
-			put("^\\d{1,2}-\\d{1,2}-\\d{4}$", "dd-MM-yyyy");
-			put("^\\d{4}-\\d{1,2}-\\d{1,2}$", "yyyy-MM-dd");
-			put("^\\d{1,2}/\\d{1,2}/\\d{4}$", "MM/dd/yyyy");
-			put("^\\d{4}/\\d{1,2}/\\d{1,2}$", "yyyy/MM/dd");
-			put("^\\d{1,2}\\s[a-z]{3}\\s\\d{4}$", "dd MMM yyyy");
-			put("^\\d{1,2}\\s[a-z]{4,}\\s\\d{4}$", "dd MMMM yyyy");
-			put("^\\d{12}$", "yyyyMMddHHmm");
-			put("^\\d{8}\\s\\d{4}$", "yyyyMMdd HHmm");
-			put("^\\d{1,2}-\\d{1,2}-\\d{4}\\s\\d{1,2}:\\d{2}$", "dd-MM-yyyy HH:mm");
-			put("^\\d{4}-\\d{1,2}-\\d{1,2}\\s\\d{1,2}:\\d{2}$", "yyyy-MM-dd HH:mm");
-			put("^\\d{1,2}/\\d{1,2}/\\d{4}\\s\\d{1,2}:\\d{2}$", "MM/dd/yyyy HH:mm");
-			put("^\\d{4}/\\d{1,2}/\\d{1,2}\\s\\d{1,2}:\\d{2}$", "yyyy/MM/dd HH:mm");
-			put("^\\d{1,2}\\s[a-z]{3}\\s\\d{4}\\s\\d{1,2}:\\d{2}$", "dd MMM yyyy HH:mm");
-			put("^\\d{1,2}\\s[a-z]{4,}\\s\\d{4}\\s\\d{1,2}:\\d{2}$", "dd MMMM yyyy HH:mm");
-			put("^\\d{14}$", "yyyyMMddHHmmss");
-			put("^\\d{8}\\s\\d{6}$", "yyyyMMdd HHmmss");
-			put("^\\d{1,2}-\\d{1,2}-\\d{4}\\s\\d{1,2}:\\d{2}:\\d{2}$", "dd-MM-yyyy HH:mm:ss");
-			put("^\\d{4}-\\d{1,2}-\\d{1,2}\\s\\d{1,2}:\\d{2}:\\d{2}$", "yyyy-MM-dd HH:mm:ss");
-			put("^\\d{1,2}/\\d{1,2}/\\d{4}\\s\\d{1,2}:\\d{2}:\\d{2}$", "MM/dd/yyyy HH:mm:ss");
-			put("^\\d{4}/\\d{1,2}/\\d{1,2}\\s\\d{1,2}:\\d{2}:\\d{2}$", "yyyy/MM/dd HH:mm:ss");
-			put("^\\d{1,2}\\s[a-z]{3}\\s\\d{4}\\s\\d{1,2}:\\d{2}:\\d{2}$", "dd MMM yyyy HH:mm:ss");
-			put("^\\d{1,2}\\s[a-z]{4,}\\s\\d{4}\\s\\d{1,2}:\\d{2}:\\d{2}$", "dd MMMM yyyy HH:mm:ss");
-		}
-	};
-
-	/**
-	 * Determine SimpleDateFormat pattern matching with the given date string. Returns null if format is unknown. You
-	 * can simply extend DateUtil with more formats if needed.
-	 *
-	 * @param dateString
-	 *             The date string to determine the SimpleDateFormat pattern for.
-	 * @return The matching SimpleDateFormat pattern, or null if format is unknown.
-	 * @see java.text.SimpleDateFormat
-	 */
-	public static String determineDateFormat(String dateString)
-        throws ParseException
-	{
-		for (String regexp : DATE_FORMAT_REGEXPS.keySet())
-			if (dateString.toLowerCase().matches(regexp))
-				return DATE_FORMAT_REGEXPS.get(regexp);
-
-        throw new ParseException("unable to parse date",0);
-	}
-
-	public static DateTime determineDate(String dateString)
-		throws ParseException
-	{
-        // Trim the string just in case it is dirty.
-        dateString = dateString.trim();
-
-        // check to see if it looks like it is millis. If so, parse as millis and return.
-        if(dateString.matches(REGEX_ONLY_NUMBERS))
-            return new DateTime(new Date(Long.parseLong(dateString)));
-
-        try
-        {
-            // try to parse the string into a java.date object, if possible.
-            SimpleDateFormat dateFormat = new SimpleDateFormat(determineDateFormat(dateString));
-            dateFormat.setLenient(false);
-            return new DateTime(dateFormat.parse(dateString));
-        }
-        catch(Exception e)
-        {
-
-        }
-
-        return new DateTime(DateTime.parse(dateString));
-	}
-
-    public static DateTime determineDateTime(String dateString)
-            throws ParseException
-    {
-        return new DateTime(determineDate(dateString));
-    }
-
-    public static DateTime determineDateTime(String dateString, DateTimeZone theTimeZone)
-            throws ParseException
-    {
-        DateTime beforeTimeZone = determineDateTime(dateString);
-        return new DateTime(beforeTimeZone.getYear(),beforeTimeZone.getMonthOfYear(), beforeTimeZone.getDayOfMonth(), beforeTimeZone.getHourOfDay(), beforeTimeZone.getMinuteOfHour(), beforeTimeZone.getSecondOfMinute(), beforeTimeZone.getMillisOfSecond(), theTimeZone);
-    }
-
-
-    public static String getAliasForDate(String date, String prefix) throws ParseException {
-        return getAliasesForDateRange(date, null, prefix).iterator().next();
-    }
-
-    public static String getAliasForDate(DateTime date, String prefix) throws ParseException {
-        return getAliasesForDateRange(date, null, prefix).iterator().next();
-    }
-
-    public static Set<String> getAliasesForDateRange(String starDate, String endDate, String prefix)
-        throws ParseException
-    {
-        DateTime start = null;
-        DateTime end = null;
-        DateTimeFormatter df = ISODateTimeFormat.dateTimeNoMillis();
-        try {
-            start = df.parseDateTime(starDate);
-        } catch (Exception e) {
-            //do nothing. try to parse with other parsers
-        }
-        if(start == null) {
-            start = determineDateTime(starDate);
-        }
-        if(endDate != null) {
-            try {
-                end = df.parseDateTime(endDate);
-            } catch (Exception e) {
-                //do nothing. try to parse with other parsers
-            }
-            if( end == null)
-                end = determineDateTime(endDate);
-        }
-        return getAliasesForDateRange(start, end, prefix);
-    }
-
-    public static Set<String> getAliasesForDateRange(DateTime startDate, DateTime endDate, String prefix) {
-        Set<String> aliases = new HashSet<String>();
-        aliases.add(prefix+"_"+getDateAbbreviation(startDate.getYear(), startDate.getMonthOfYear()));
-        if(endDate == null) {
-            return aliases;
-        }
-        while(endDate.isAfter(startDate)) {
-            aliases.add(prefix+"_"+getDateAbbreviation(endDate.getYear(), endDate.getMonthOfYear()));
-            endDate = endDate.minusMonths(1);
-        }
-        return aliases;
-    }
-
-    private static String getDateAbbreviation(int year, int month) {
-        if(month > 9) {
-            return Integer.toString(year)+Integer.toString(month);
-        }
-        else {
-            return Integer.toString(year)+"0"+Integer.toString(month);
-        }
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/GuidUtils.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/GuidUtils.java b/streams-util/src/main/java/org/apache/streams/util/GuidUtils.java
index 1972bc7..2d129de 100644
--- a/streams-util/src/main/java/org/apache/streams/util/GuidUtils.java
+++ b/streams-util/src/main/java/org/apache/streams/util/GuidUtils.java
@@ -29,20 +29,26 @@ import java.nio.charset.Charset;
  */
 public class GuidUtils {
 
-    private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
+  private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
 
-    public static String generateGuid(String... parts) {
+  /**
+   * generateGuid from list of parts.
+   * @param parts list of parts
+   * @return guid
+   */
+  public static String generateGuid(String... parts) {
 
-        StringBuilder seed = new StringBuilder();
-        for( String part : parts ) {
-            Preconditions.checkNotNull(part);
-            Preconditions.checkArgument(!Strings.isNullOrEmpty(part));
-            seed.append(part);
-        }
+    StringBuilder seed = new StringBuilder();
 
-        String hash = Hashing.goodFastHash(24).hashString(seed, UTF8_CHARSET).asBytes().toString();
+    for ( String part : parts ) {
+      Preconditions.checkNotNull(part);
+      Preconditions.checkArgument(!Strings.isNullOrEmpty(part));
+      seed.append(part);
+    }
 
-        return hash;
+    String hash = Hashing.goodFastHash(24).hashString(seed, UTF8_CHARSET).asBytes().toString();
 
-    }
+    return hash;
+
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/SerializationUtil.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/SerializationUtil.java b/streams-util/src/main/java/org/apache/streams/util/SerializationUtil.java
index de324d2..ba22d3d 100644
--- a/streams-util/src/main/java/org/apache/streams/util/SerializationUtil.java
+++ b/streams-util/src/main/java/org/apache/streams/util/SerializationUtil.java
@@ -20,7 +20,11 @@ package org.apache.streams.util;
 
 import org.apache.commons.io.input.ClassLoaderObjectInputStream;
 
-import java.io.*;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
 
 /**
  * SerializationUtil contains methods for serializing, deserializing, and cloning
@@ -28,47 +32,62 @@ import java.io.*;
  */
 public class SerializationUtil {
 
-    /**
-     * BORROwED FROM APACHE STORM PROJECT
-     * @param obj
-     * @return
-     */
-    public static byte[] serialize(Object obj) {
-        try {
-            ByteArrayOutputStream bos = new ByteArrayOutputStream();
-            ObjectOutputStream oos = new ObjectOutputStream(bos);
-            oos.writeObject(obj);
-            oos.close();
-            return bos.toByteArray();
-        } catch(IOException ioe) {
-            throw new RuntimeException(ioe);
-        }
+  /**
+   * serialize Object as byte array.
+   *
+   * <p/>
+   * BORROWED FROM APACHE STORM PROJECT
+   *
+   * @param obj Object
+   * @return byte[]
+   */
+  public static byte[] serialize(Object obj) {
+    try {
+      ByteArrayOutputStream bos = new ByteArrayOutputStream();
+      ObjectOutputStream oos = new ObjectOutputStream(bos);
+      oos.writeObject(obj);
+      oos.close();
+      return bos.toByteArray();
+    } catch (IOException ioe) {
+      throw new RuntimeException(ioe);
     }
+  }
 
-    /**
-     * BORROwED FROM APACHE STORM PROJECT
-     * @param serialized
-     * @return
-     */
-    public static Object deserialize(byte[] serialized) {
-        try {
-            ByteArrayInputStream bis = new ByteArrayInputStream(serialized);
-            ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
-            ObjectInputStream ois = new ClassLoaderObjectInputStream(classLoader, bis);
-            Object ret = ois.readObject();
-            ois.close();
-            return ret;
-        } catch(IOException ioe) {
-            throw new RuntimeException(ioe);
-        } catch(ClassNotFoundException e) {
-            throw new RuntimeException(e);
-        }
+  /**
+   * deserialize byte array as Object.
+   *
+   * <p/>
+   * BORROWED FROM APACHE STORM PROJECT
+   *
+   * @param serialized byte[]
+   * @return Object
+   */
+  public static Object deserialize(byte[] serialized) {
+    try {
+      ByteArrayInputStream bis = new ByteArrayInputStream(serialized);
+      ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+      ObjectInputStream ois = new ClassLoaderObjectInputStream(classLoader, bis);
+      Object ret = ois.readObject();
+      ois.close();
+      return ret;
+    } catch (IOException ioe) {
+      throw new RuntimeException(ioe);
+    } catch (ClassNotFoundException ex) {
+      throw new RuntimeException(ex);
     }
+  }
 
-
-    public static <T> T cloneBySerialization(T obj) {
-        if( obj != null )
-            return (T) deserialize(serialize(obj));
-        else return null;
+  /**
+   * clone Object by serialization.
+   * @param obj Object
+   * @param <T> type
+   * @return cloned Object
+   */
+  public static <T> T cloneBySerialization(T obj) {
+    if ( obj != null ) {
+      return (T) deserialize(serialize(obj));
+    } else {
+      return null;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/AbstractBackOffStrategy.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/AbstractBackOffStrategy.java b/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/AbstractBackOffStrategy.java
index 7fbfc6b..3dc3e08 100644
--- a/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/AbstractBackOffStrategy.java
+++ b/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/AbstractBackOffStrategy.java
@@ -22,61 +22,64 @@ import java.util.concurrent.atomic.AtomicInteger;
  */
 public abstract class AbstractBackOffStrategy implements BackOffStrategy {
 
-    private long baseSleepTime;
-    private long lastSleepTime;
-    private int maxAttempts;
-    private AtomicInteger attemptsCount;
+  private long baseSleepTime;
+  private long lastSleepTime;
+  private int maxAttempts;
+  private AtomicInteger attemptsCount;
 
-    /**
-     * A BackOffStrategy that can effectively be used endlessly.
-     * @param baseBackOffTime amount of time back of in seconds
-     */
-    public AbstractBackOffStrategy(long baseBackOffTime) {
-        this(baseBackOffTime, -1);
-    }
+  /**
+   * A BackOffStrategy that can effectively be used endlessly.
+   * @param baseBackOffTime amount of time to back off in seconds
+   */
+  public AbstractBackOffStrategy(long baseBackOffTime) {
+    this(baseBackOffTime, -1);
+  }
 
-    /**
-     * A BackOffStrategy that has a limited number of uses before it throws a {@link org.apache.streams.util.api.requests.backoff.BackOffException}
-     * @param baseBackOffTime time to back off in milliseconds, must be greater than 0.
-     * @param maximumNumberOfBackOffAttempts maximum number of attempts, must be grater than 0 or -1. -1 indicates there is no maximum number of attempts.
-     */
-    public AbstractBackOffStrategy(long baseBackOffTime, int maximumNumberOfBackOffAttempts) {
-        if(baseBackOffTime <= 0) {
-            throw new IllegalArgumentException("backOffTimeInMilliSeconds is not greater than 0 : "+baseBackOffTime);
-        }
-        if(maximumNumberOfBackOffAttempts<=0 && maximumNumberOfBackOffAttempts != -1) {
-            throw new IllegalArgumentException("maximumNumberOfBackOffAttempts is not greater than 0 : "+maximumNumberOfBackOffAttempts);
-        }
-        this.baseSleepTime = baseBackOffTime;
-        this.maxAttempts = maximumNumberOfBackOffAttempts;
-        this.attemptsCount = new AtomicInteger(0);
+  /**
+   * A BackOffStrategy that has a limited number of uses before it throws a
+   * {@link org.apache.streams.util.api.requests.backoff.BackOffException}.
+   * @param baseBackOffTime time to back off in milliseconds, must be greater than 0.
+   * @param maximumNumberOfBackOffAttempts maximum number of attempts, must be greater than 0 or -1.
+   *                                       -1 indicates there is no maximum number of attempts.
+   */
+  public AbstractBackOffStrategy(long baseBackOffTime, int maximumNumberOfBackOffAttempts) {
+    if (baseBackOffTime <= 0) {
+      throw new IllegalArgumentException("backOffTimeInMilliSeconds is not greater than 0 : " + baseBackOffTime);
     }
-
-    @Override
-    public void backOff() throws BackOffException {
-        int attempt = this.attemptsCount.getAndIncrement();
-        if(attempt >= this.maxAttempts && this.maxAttempts != -1) {
-            throw new BackOffException(attempt, this.lastSleepTime);
-        } else {
-            try {
-                Thread.sleep(this.lastSleepTime = calculateBackOffTime(attempt, this.baseSleepTime));
-            } catch (InterruptedException ie) {
-                Thread.currentThread().interrupt();
-            }
-        }
+    if (maximumNumberOfBackOffAttempts <= 0 && maximumNumberOfBackOffAttempts != -1) {
+      throw new IllegalArgumentException("maximumNumberOfBackOffAttempts is not greater than 0 : " + maximumNumberOfBackOffAttempts);
     }
+    this.baseSleepTime = baseBackOffTime;
+    this.maxAttempts = maximumNumberOfBackOffAttempts;
+    this.attemptsCount = new AtomicInteger(0);
+  }
 
-    @Override
-    public void reset() {
-        this.attemptsCount.set(0);
+  @Override
+  public void backOff() throws BackOffException {
+    int attempt = this.attemptsCount.getAndIncrement();
+    if (attempt >= this.maxAttempts && this.maxAttempts != -1) {
+      throw new BackOffException(attempt, this.lastSleepTime);
+    } else {
+      try {
+        Thread.sleep(this.lastSleepTime = calculateBackOffTime(attempt, this.baseSleepTime));
+      } catch (InterruptedException ie) {
+        Thread.currentThread().interrupt();
+      }
     }
+  }
+
+  @Override
+  public void reset() {
+    this.attemptsCount.set(0);
+  }
 
-    /**
-     * Calculate the amount of time in milliseconds that the strategy should back off for
-     * @param attemptCount the number of attempts the strategy has backed off. i.e. 1 -> this is the first attempt, 2 -> this is the second attempt, etc.
-     * @param baseSleepTime the minimum amount of time it should back off for in milliseconds
-     * @return the amount of time it should back off in milliseconds
-     */
-    protected abstract long calculateBackOffTime(int attemptCount, long baseSleepTime);
+  /**
+   * Calculate the amount of time in milliseconds that the strategy should back off for
+   * @param attemptCount the number of attempts the strategy has backed off.
+   *                     i.e. 1 -> this is the first attempt, 2 -> this is the second attempt, etc.
+   * @param baseSleepTime the minimum amount of time it should back off for in milliseconds
+   * @return the amount of time it should back off in milliseconds
+   */
+  protected abstract long calculateBackOffTime(int attemptCount, long baseSleepTime);
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/BackOffException.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/BackOffException.java b/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/BackOffException.java
index 223303c..692c0b6 100644
--- a/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/BackOffException.java
+++ b/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/BackOffException.java
@@ -21,43 +21,49 @@ package org.apache.streams.util.api.requests.backoff;
  */
 public class BackOffException extends Exception {
 
-    private int attemptCount;
-    private long sleepTime;
-
-    public BackOffException() {
-        this(-1, -1);
-    }
-
-    public BackOffException(String message) {
-        this(message, -1, -1);
-    }
-
-    public BackOffException(int attemptCount, long maxSleepTime) {
-        this.attemptCount = attemptCount;
-        this.sleepTime = maxSleepTime;
-    }
-
-    public BackOffException(String message, int attemptCount, long maxSleepTime) {
-        super(message);
-        this.attemptCount = attemptCount;
-        this.sleepTime = maxSleepTime;
-    }
-
-    /**
-     * Gets the number of back off attempts that happened before the exception was thrown. If the function that
-     * initialized this exception does not set the number of attempts, -1 will be returned.
-     * @return number of attempts
-     */
-    public int getNumberOfBackOffsAttempted() {
-        return this.attemptCount;
-    }
-
-    /**
-     * Gets the longest sleep period that the strategy attempted. If the function that
-     * initialized this exception does not set the longest sleep period, -1 will be returned.
-     * @return
-     */
-    public long getLongestBackOff() {
-        return this.sleepTime;
-    }
+  private int attemptCount;
+  private long sleepTime;
+
+  public BackOffException() {
+    this(-1, -1);
+  }
+
+  public BackOffException(String message) {
+    this(message, -1, -1);
+  }
+
+  public BackOffException(int attemptCount, long maxSleepTime) {
+    this.attemptCount = attemptCount;
+    this.sleepTime = maxSleepTime;
+  }
+
+  /**
+   * BackOffException constructor.
+   * @param message message
+   * @param attemptCount attemptCount
+   * @param maxSleepTime maxSleepTime (in millis)
+   */
+  public BackOffException(String message, int attemptCount, long maxSleepTime) {
+    super(message);
+    this.attemptCount = attemptCount;
+    this.sleepTime = maxSleepTime;
+  }
+
+  /**
+   * Gets the number of back off attempts that happened before the exception was thrown. If the function that
+   * initialized this exception does not set the number of attempts, -1 will be returned.
+   * @return number of back off attempts
+   */
+  public int getNumberOfBackOffsAttempted() {
+    return this.attemptCount;
+  }
+
+  /**
+   * Gets the longest sleep period that the strategy attempted. If the function that
+   * initialized this exception does not set the longest sleep period, -1 will be returned.
+   * @return longest sleep period that the strategy attempted
+   */
+  public long getLongestBackOff() {
+    return this.sleepTime;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/BackOffStrategy.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/BackOffStrategy.java b/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/BackOffStrategy.java
index a0d80e8..44497ab 100644
--- a/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/BackOffStrategy.java
+++ b/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/BackOffStrategy.java
@@ -19,6 +19,7 @@ package org.apache.streams.util.api.requests.backoff;
  * BackOffStrategy will cause the current thread to sleep for a specific amount of time. This is used to adhere to
  * api rate limits.
  *
+ * <p/>
  * The example below illustrates using a BackOffStrategy to slow down requests when you hit a rate limit exception.
  *
  * <code>
@@ -36,16 +37,17 @@ package org.apache.streams.util.api.requests.backoff;
  */
 public interface BackOffStrategy {
 
-    /**
-     * Cause the current thread to sleep for an amount of time based on the implemented strategy. If limits are set
-     * on the number of times the backOff can be called, an exception will be thrown.
-     * @throws BackOffException
-     */
-    public void backOff() throws BackOffException;
+  /**
+   * Cause the current thread to sleep for an amount of time based on the implemented strategy. If limits are set
+   * on the number of times the backOff can be called, an exception will be thrown.
+   * @throws BackOffException BackOffException
+   */
+  public void backOff() throws BackOffException;
 
-    /**
-     * Rests the back off strategy to its original state.  After the call the strategy will act as if {@link AbstractBackOffStrategy#backOff()}
-     * has never been called.
-     */
-    public void reset();
+  /**
+   * Resets the back off strategy to its original state.
+   * After the call the strategy will act as if {@link AbstractBackOffStrategy#backOff()}
+   * has never been called.
+   */
+  public void reset();
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/ConstantTimeBackOffStrategy.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/ConstantTimeBackOffStrategy.java b/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/ConstantTimeBackOffStrategy.java
index b3fd3f2..26ec225 100644
--- a/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/ConstantTimeBackOffStrategy.java
+++ b/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/ConstantTimeBackOffStrategy.java
@@ -24,25 +24,27 @@ import org.apache.streams.util.api.requests.backoff.AbstractBackOffStrategy;
  */
 public class ConstantTimeBackOffStrategy extends AbstractBackOffStrategy {
 
-    /**
-     * A ConstantTimeBackOffStrategy that can effectively be used endlessly.
-     * @param baseBackOffTimeInMiliseconds amount of time back of in milliseconds
-     */
-    public ConstantTimeBackOffStrategy(long baseBackOffTimeInMiliseconds) {
-        this(baseBackOffTimeInMiliseconds, -1);
-    }
+  /**
+   * A ConstantTimeBackOffStrategy that can effectively be used endlessly.
+   * @param baseBackOffTimeInMiliseconds amount of time to back off in milliseconds
+   */
+  public ConstantTimeBackOffStrategy(long baseBackOffTimeInMiliseconds) {
+    this(baseBackOffTimeInMiliseconds, -1);
+  }
 
-    /**
-     * A ConstantTimeBackOffStrategy that has a limited number of uses before it throws a {@link org.apache.streams.util.api.requests.backoff.BackOffException}
-     * @param baseBackOffTimeInMiliseconds time to back off in milliseconds, must be greater than 0.
-     * @param maximumNumberOfBackOffAttempts maximum number of attempts, must be grater than 0 or -1. -1 indicates there is no maximum number of attempts.
-     */
-    public ConstantTimeBackOffStrategy(long baseBackOffTimeInMiliseconds, int maximumNumberOfBackOffAttempts) {
-        super(baseBackOffTimeInMiliseconds, maximumNumberOfBackOffAttempts);
-    }
+  /**
+   * A ConstantTimeBackOffStrategy that has a limited number of uses before it
+   * throws a {@link org.apache.streams.util.api.requests.backoff.BackOffException}
+   * @param baseBackOffTimeInMiliseconds time to back off in milliseconds, must be greater than 0.
+   * @param maximumNumberOfBackOffAttempts maximum number of attempts, must be greater than 0 or -1.
+   *                                       -1 indicates there is no maximum number of attempts.
+   */
+  public ConstantTimeBackOffStrategy(long baseBackOffTimeInMiliseconds, int maximumNumberOfBackOffAttempts) {
+    super(baseBackOffTimeInMiliseconds, maximumNumberOfBackOffAttempts);
+  }
 
-    @Override
-    protected long calculateBackOffTime(int attemptCount, long baseSleepTime) {
-        return baseSleepTime;
-    }
+  @Override
+  protected long calculateBackOffTime(int attemptCount, long baseSleepTime) {
+    return baseSleepTime;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/ExponentialBackOffStrategy.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/ExponentialBackOffStrategy.java b/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/ExponentialBackOffStrategy.java
index a5a9656..0962984 100644
--- a/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/ExponentialBackOffStrategy.java
+++ b/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/ExponentialBackOffStrategy.java
@@ -18,30 +18,29 @@ package org.apache.streams.util.api.requests.backoff.impl;
 import org.apache.streams.util.api.requests.backoff.AbstractBackOffStrategy;
 
 /**
- * Exponential backk strategy.  Caluclated by baseBackOffTimeInSeconds raised the attempt-count power.
+ * Exponential backoff strategy.  Calculated by baseBackOffTimeInSeconds raised the attempt-count power.
  */
 public class ExponentialBackOffStrategy extends AbstractBackOffStrategy {
 
+  /**
+   * Unlimited use ExponentialBackOffStrategy.
+   * @param baseBackOffTimeInSeconds baseBackOffTimeInSeconds
+   */
+  public ExponentialBackOffStrategy(int baseBackOffTimeInSeconds) {
+    this(baseBackOffTimeInSeconds, -1);
+  }
 
-    /**
-     * Unlimited use ExponentialBackOffStrategy
-     * @param baseBackOffTimeInSeconds
-     */
-    public ExponentialBackOffStrategy(int baseBackOffTimeInSeconds) {
-        this(baseBackOffTimeInSeconds, -1);
-    }
+  /**
+   * Limited use ExponentialBackOffStrategy.
+   * @param baseBackOffTimeInSeconds baseBackOffTimeInSeconds
+   * @param maxNumAttempts maxNumAttempts
+   */
+  public ExponentialBackOffStrategy(int baseBackOffTimeInSeconds, int maxNumAttempts) {
+    super(baseBackOffTimeInSeconds, maxNumAttempts);
+  }
 
-    /**
-     * Limited use ExponentialBackOffStrategy
-     * @param baseBackOffTimeInSeconds
-     * @param maxNumAttempts
-     */
-    public ExponentialBackOffStrategy(int baseBackOffTimeInSeconds, int maxNumAttempts) {
-        super(baseBackOffTimeInSeconds, maxNumAttempts);
-    }
-
-    @Override
-    protected long calculateBackOffTime(int attemptCount, long baseSleepTime) {
-        return Math.round(Math.pow(baseSleepTime, attemptCount)) * 1000;
-    }
+  @Override
+  protected long calculateBackOffTime(int attemptCount, long baseSleepTime) {
+    return Math.round(Math.pow(baseSleepTime, attemptCount)) * 1000;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/LinearTimeBackOffStrategy.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/LinearTimeBackOffStrategy.java b/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/LinearTimeBackOffStrategy.java
index 38d05a1..d6f323f 100644
--- a/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/LinearTimeBackOffStrategy.java
+++ b/streams-util/src/main/java/org/apache/streams/util/api/requests/backoff/impl/LinearTimeBackOffStrategy.java
@@ -24,17 +24,16 @@ import org.apache.streams.util.api.requests.backoff.AbstractBackOffStrategy;
  */
 public class LinearTimeBackOffStrategy extends AbstractBackOffStrategy {
 
+  public LinearTimeBackOffStrategy(int baseBackOffTimeInSeconds) {
+    this(baseBackOffTimeInSeconds, -1);
+  }
 
-    public LinearTimeBackOffStrategy(int baseBackOffTimeInSeconds) {
-        this(baseBackOffTimeInSeconds, -1);
-    }
+  public LinearTimeBackOffStrategy(int baseBackOffTimeInSeconds, int maxAttempts) {
+    super(baseBackOffTimeInSeconds, -1);
+  }
 
-    public LinearTimeBackOffStrategy(int baseBackOffTimeInSeconds, int maxAttempts) {
-        super(baseBackOffTimeInSeconds, -1);
-    }
-
-    @Override
-    protected long calculateBackOffTime(int attemptCount, long baseSleepTime) {
-        return 1000L * attemptCount * baseSleepTime;
-    }
+  @Override
+  protected long calculateBackOffTime(int attemptCount, long baseSleepTime) {
+    return 1000L * attemptCount * baseSleepTime;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/AbstractOauthToken.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/AbstractOauthToken.java b/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/AbstractOauthToken.java
index dfdec72..41ec4b6 100644
--- a/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/AbstractOauthToken.java
+++ b/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/AbstractOauthToken.java
@@ -12,22 +12,23 @@ software distributed under the License is distributed on an
 KIND, either express or implied. See the License for the
 specific language governing permissions and limitations
 under the License. */
+
 package org.apache.streams.util.oauth.tokens;
 
 /**
- *
+ * AbstractOauthToken.
  */
 public abstract class AbstractOauthToken {
 
-    /**
-     * Must create equals method for all OauthTokens.
-     * @param o
-     * @return true if equal, and false otherwise
-     */
-    protected abstract boolean internalEquals(Object o);
+  /**
+   * Must create equals method for all OauthTokens.
+   * @param object object for comparison
+   * @return true if equal, and false otherwise
+   */
+  protected abstract boolean internalEquals(Object object);
 
-    @Override
-    public boolean equals(Object o) {
-        return this.internalEquals(o);
-    }
+  @Override
+  public boolean equals(Object object) {
+    return this.internalEquals(object);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/SimpleTokenManager.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/SimpleTokenManager.java b/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/SimpleTokenManager.java
index fed194f..7b3f370 100644
--- a/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/SimpleTokenManager.java
+++ b/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/SimpleTokenManager.java
@@ -12,42 +12,40 @@ software distributed under the License is distributed on an
 KIND, either express or implied. See the License for the
 specific language governing permissions and limitations
 under the License. */
-package org.apache.streams.util.oauth.tokens.tokenmanager;
 
-import org.apache.streams.util.oauth.tokens.AbstractOauthToken;
+package org.apache.streams.util.oauth.tokens.tokenmanager;
 
 import java.util.Collection;
 
 /**
- * Manges access to oauth tokens.  Allows a caller to add tokens to the token pool and receive an available token.
+ * Manages access to oauth tokens.  Allows a caller to add tokens to the token pool and receive an available token.
  */
 public interface SimpleTokenManager<T> {
 
-
-    /**
-     * Adds a token to the available token pool.
-     * @param token Token to be added
-     * @return true, if token was successfully added to the pool and false otherwise.
-     */
-    public boolean addTokenToPool(T token);
-
-    /**
-     * Adds a {@link java.util.Collection} of tokens to the available token pool.
-     * @param tokens Tokens to be added
-     * @return true, if the token pool size increased after adding the tokens, and false otherwise.
-     */
-    public boolean addAllTokensToPool(Collection<T> tokens);
-
-    /**
-     * Get an available token. If no tokens are available it returns null.
-     * @return next available token
-     */
-    public T getNextAvailableToken();
-
-    /**
-     * Get the number of available tokens
-     * @return number of available tokens
-     */
-    public int numAvailableTokens();
+  /**
+   * Adds a token to the available token pool.
+   * @param token Token to be added
+   * @return true, if token was successfully added to the pool and false otherwise.
+   */
+  public boolean addTokenToPool(T token);
+
+  /**
+   * Adds a {@link java.util.Collection} of tokens to the available token pool.
+   * @param tokens Tokens to be added
+   * @return true, if the token pool size increased after adding the tokens, and false otherwise.
+   */
+  public boolean addAllTokensToPool(Collection<T> tokens);
+
+  /**
+   * Get an available token. If no tokens are available it returns null.
+   * @return next available token
+   */
+  public T getNextAvailableToken();
+
+  /**
+   * Get the number of available tokens.
+   * @return number of available tokens
+   */
+  public int numAvailableTokens();
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/impl/BasicTokenManager.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/impl/BasicTokenManager.java b/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/impl/BasicTokenManager.java
new file mode 100644
index 0000000..7c1a9e3
--- /dev/null
+++ b/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/impl/BasicTokenManager.java
@@ -0,0 +1,94 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance *
+http://www.apache.org/licenses/LICENSE-2.0 *
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied. See the License for the
+specific language governing permissions and limitations
+under the License. */
+
+package org.apache.streams.util.oauth.tokens.tokenmanager.impl;
+
+import org.apache.streams.util.oauth.tokens.tokenmanager.SimpleTokenManager;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+/**
+ * Manages a pool of tokens the most basic possible way.
+ * If all tokens are added to the manager before {@link BasicTokenManager#getNextAvailableToken() getNextAvailableToken}
+ * is called tokens are issued in the order they were added to the manager, FIFO.  The BasicTokenManager acts as a circular queue
+ * of tokens.  Once the manager issues all available tokens it will cycle back to the first token and start issuing tokens again.
+ *
+ * <p/>
+ * When adding tokens to the pool of available tokens, the manager will not add tokens that are already in the pool.
+ *
+ * <p/>
+ * The manager class is thread safe.
+ */
+public class BasicTokenManager<T> implements SimpleTokenManager<T> {
+
+  private ArrayList<T> availableTokens;
+  private int nextToken;
+
+  public BasicTokenManager() {
+    this(null);
+  }
+
+  /**
+   * BasicTokenManager constructor.
+   * @param tokens Collection of tokens
+   */
+  public BasicTokenManager(Collection<T> tokens) {
+    if (tokens != null) {
+      this.availableTokens = new ArrayList<T>(tokens.size());
+      this.addAllTokensToPool(tokens);
+    } else {
+      this.availableTokens = new ArrayList<T>();
+    }
+    this.nextToken = 0;
+  }
+
+  @Override
+  public synchronized boolean addTokenToPool(T token) {
+    if (token == null || this.availableTokens.contains(token)) {
+      return false;
+    } else {
+      return this.availableTokens.add(token);
+    }
+  }
+
+  @Override
+  public synchronized boolean addAllTokensToPool(Collection<T> tokens) {
+    int startSize = this.availableTokens.size();
+    for (T token : tokens) {
+      this.addTokenToPool(token);
+    }
+    return startSize < this.availableTokens.size();
+  }
+
+  @Override
+  public synchronized T getNextAvailableToken() {
+    T token = null;
+    if (this.availableTokens.size() == 0) {
+      return token;
+    } else {
+      token = this.availableTokens.get(nextToken++);
+      if (nextToken == this.availableTokens.size()) {
+        nextToken = 0;
+      }
+      return token;
+    }
+  }
+
+  @Override
+  public synchronized int numAvailableTokens() {
+    return this.availableTokens.size();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/impl/BasicTokenManger.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/impl/BasicTokenManger.java b/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/impl/BasicTokenManger.java
deleted file mode 100644
index 4c64bf7..0000000
--- a/streams-util/src/main/java/org/apache/streams/util/oauth/tokens/tokenmanager/impl/BasicTokenManger.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance *
-http://www.apache.org/licenses/LICENSE-2.0 *
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied. See the License for the
-specific language governing permissions and limitations
-under the License. */
-package org.apache.streams.util.oauth.tokens.tokenmanager.impl;
-
-import org.apache.streams.util.oauth.tokens.AbstractOauthToken;
-import org.apache.streams.util.oauth.tokens.tokenmanager.SimpleTokenManager;
-
-import java.util.ArrayList;
-import java.util.Collection;
-
-/**
- * Manages a pool of tokens the most basic possible way.  If all tokens are added to the manager before {@link BasicTokenManger#getNextAvailableToken() getNextAvailableToken}
- * is called tokens are issued in the order they were added to the manager, FIFO.  The BasicTokenManager acts as a circular queue
- * of tokens.  Once the manager issues all available tokens it will cycle back to the first token and start issuing tokens again.
- *
- * When adding tokens to the pool of available tokens, the manager will not add tokens that are already in the pool.
- *
- * The manager class is thread safe.
- */
-public class BasicTokenManger<T> implements SimpleTokenManager<T>{
-
-    private ArrayList<T> availableTokens;
-    private int nextToken;
-
-    public BasicTokenManger() {
-        this(null);
-    }
-
-    public BasicTokenManger(Collection<T> tokens) {
-        if(tokens != null) {
-            this.availableTokens = new ArrayList<T>(tokens.size());
-            this.addAllTokensToPool(tokens);
-        } else {
-            this.availableTokens = new ArrayList<T>();
-        }
-        this.nextToken = 0;
-    }
-
-    @Override
-    public synchronized boolean addTokenToPool(T token) {
-        if(token == null || this.availableTokens.contains(token))
-            return false;
-        else
-            return this.availableTokens.add(token);
-    }
-
-    @Override
-    public synchronized boolean addAllTokensToPool(Collection<T> tokens) {
-        int startSize = this.availableTokens.size();
-        for(T token : tokens) {
-            this.addTokenToPool(token);
-        }
-        return startSize < this.availableTokens.size();
-    }
-
-    @Override
-    public synchronized T getNextAvailableToken() {
-        T token = null;
-        if(this.availableTokens.size() == 0) {
-            return token;
-        } else {
-            token = this.availableTokens.get(nextToken++);
-            if(nextToken == this.availableTokens.size()) {
-                nextToken = 0;
-            }
-            return token;
-        }
-    }
-
-    @Override
-    public synchronized int numAvailableTokens() {
-        return this.availableTokens.size();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/schema/FieldType.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/schema/FieldType.java b/streams-util/src/main/java/org/apache/streams/util/schema/FieldType.java
index 450851e..57a1d44 100644
--- a/streams-util/src/main/java/org/apache/streams/util/schema/FieldType.java
+++ b/streams-util/src/main/java/org/apache/streams/util/schema/FieldType.java
@@ -15,6 +15,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.util.schema;
 
 /**
@@ -22,10 +23,10 @@ package org.apache.streams.util.schema;
  * be able to translate.
  */
 public enum FieldType {
-    STRING,
-    INTEGER,
-    NUMBER,
-    BOOLEAN,
-    OBJECT,
-    ARRAY
+  STRING,
+  INTEGER,
+  NUMBER,
+  BOOLEAN,
+  OBJECT,
+  ARRAY
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/schema/FieldUtil.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/schema/FieldUtil.java b/streams-util/src/main/java/org/apache/streams/util/schema/FieldUtil.java
index 6582565..a437ca4 100644
--- a/streams-util/src/main/java/org/apache/streams/util/schema/FieldUtil.java
+++ b/streams-util/src/main/java/org/apache/streams/util/schema/FieldUtil.java
@@ -15,11 +15,9 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.util.schema;
 
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.node.JsonNodeFactory;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 
 /**
@@ -27,25 +25,32 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
  */
 public class FieldUtil {
 
-    public static FieldType determineFieldType(ObjectNode fieldNode) {
-        String typeSchemaField = "type";
-        if( !fieldNode.has(typeSchemaField))
-            return null;
-        String typeSchemaFieldValue = fieldNode.get(typeSchemaField).asText();
-        if( typeSchemaFieldValue.equals("string")) {
-            return FieldType.STRING;
-        } else if( typeSchemaFieldValue.equals("integer")) {
-            return FieldType.INTEGER;
-        } else if( typeSchemaFieldValue.equals("number")) {
-            return FieldType.NUMBER;
-        } else if( typeSchemaFieldValue.equals("object")) {
-            return FieldType.OBJECT;
-        } else if( typeSchemaFieldValue.equals("boolean")) {
-            return FieldType.BOOLEAN;
-        } else if( typeSchemaFieldValue.equals("array")) {
-            return FieldType.ARRAY;
-        }
-        else return null;
+  /**
+   * determine FieldType from ObjectNode.
+   * @param fieldNode ObjectNode
+   * @return FieldType
+   */
+  public static FieldType determineFieldType(ObjectNode fieldNode) {
+    String typeSchemaField = "type";
+    if ( !fieldNode.has(typeSchemaField)) {
+      return null;
+    }
+    String typeSchemaFieldValue = fieldNode.get(typeSchemaField).asText();
+    if ( typeSchemaFieldValue.equals("string")) {
+      return FieldType.STRING;
+    } else if ( typeSchemaFieldValue.equals("integer")) {
+      return FieldType.INTEGER;
+    } else if ( typeSchemaFieldValue.equals("number")) {
+      return FieldType.NUMBER;
+    } else if ( typeSchemaFieldValue.equals("object")) {
+      return FieldType.OBJECT;
+    } else if ( typeSchemaFieldValue.equals("boolean")) {
+      return FieldType.BOOLEAN;
+    } else if ( typeSchemaFieldValue.equals("array")) {
+      return FieldType.ARRAY;
+    } else {
+      return null;
     }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/schema/FileUtil.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/schema/FileUtil.java b/streams-util/src/main/java/org/apache/streams/util/schema/FileUtil.java
index c51339a..5acd5a8 100644
--- a/streams-util/src/main/java/org/apache/streams/util/schema/FileUtil.java
+++ b/streams-util/src/main/java/org/apache/streams/util/schema/FileUtil.java
@@ -15,6 +15,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.util.schema;
 
 import com.google.common.base.Preconditions;
@@ -35,60 +36,93 @@ import java.util.List;
  */
 public class FileUtil {
 
-    private final static Logger LOGGER = LoggerFactory.getLogger(FileUtil.class);
+  private static final Logger LOGGER = LoggerFactory.getLogger(FileUtil.class);
 
-    public static String dropSourcePathPrefix(String inputFile, String sourceDirectory) {
-        if(Strings.isNullOrEmpty(sourceDirectory))
-            return inputFile;
-        else {
-            try {
-                if( inputFile.contains(sourceDirectory) && inputFile.indexOf(sourceDirectory) > 0) {
-                    return inputFile.substring(inputFile.indexOf(sourceDirectory)+sourceDirectory.length()+1);
-                }
-            } catch( Throwable e ) {
-                return inputFile;
-            }
+  /**
+   * drop source path prefix between inputFile and sourceDirectory.
+   * @param inputFile inputFile
+   * @param sourceDirectory sourceDirectory
+   * @return without path prefix
+   */
+  public static String dropSourcePathPrefix(String inputFile, String sourceDirectory) {
+    if (Strings.isNullOrEmpty(sourceDirectory)) {
+      return inputFile;
+    } else {
+      try {
+        if ( inputFile.contains(sourceDirectory) && inputFile.indexOf(sourceDirectory) > 0) {
+          return inputFile.substring(inputFile.indexOf(sourceDirectory) + sourceDirectory.length() + 1);
         }
+      } catch ( Throwable throwable ) {
         return inputFile;
+      }
     }
+    return inputFile;
+  }
 
-    public static String swapExtension(String inputFile, String originalExtension, String newExtension) {
-        if(inputFile.endsWith("."+originalExtension))
-            return inputFile.replace("."+originalExtension, "."+newExtension);
-        else return inputFile;
+  /**
+   * swapExtension.
+   * @param inputFile inputFile
+   * @param originalExtension originalExtension
+   * @param newExtension newExtension
+   * @return extension swapped
+   */
+  public static String swapExtension(String inputFile, String originalExtension, String newExtension) {
+    if (inputFile.endsWith("." + originalExtension)) {
+      return inputFile.replace("." + originalExtension, "." + newExtension);
+    } else {
+      return inputFile;
     }
+  }
 
-    public static String dropExtension(String inputFile) {
-        if(inputFile.contains("."))
-            return inputFile.substring(0, inputFile.lastIndexOf("."));
-        else return inputFile;
+  /**
+   * dropExtension.
+   * @param inputFile inputFile
+   * @return extension dropped
+   */
+  public static String dropExtension(String inputFile) {
+    if (inputFile.contains(".")) {
+      return inputFile.substring(0, inputFile.lastIndexOf("."));
+    } else {
+      return inputFile;
     }
+  }
 
-    public static void writeFile(String resourceFile, String resourceContent) {
-        try {
-            File path = new File(resourceFile);
-            File dir = path.getParentFile();
-            if( !dir.exists() )
-                dir.mkdirs();
-            Files.write(Paths.get(resourceFile), resourceContent.getBytes(), StandardOpenOption.CREATE_NEW);
-        } catch (Exception e) {
-            LOGGER.error("Write Exception: {}", e);
-        }
+  /**
+   * writeFile.
+   * @param resourceFile resourceFile
+   * @param resourceContent resourceContent
+   */
+  public static void writeFile(String resourceFile, String resourceContent) {
+    try {
+      File path = new File(resourceFile);
+      File dir = path.getParentFile();
+      if ( !dir.exists() ) {
+        dir.mkdirs();
+      }
+      Files.write(Paths.get(resourceFile), resourceContent.getBytes(), StandardOpenOption.CREATE_NEW);
+    } catch (Exception ex) {
+      LOGGER.error("Write Exception: {}", ex);
     }
+  }
 
-    public static void resolveRecursive(GenerationConfig config, List<File> schemaFiles) {
-
-        Preconditions.checkArgument(schemaFiles.size() > 0);
-        int i = 0;
-        while( schemaFiles.size() > i) {
-            File child = schemaFiles.get(i);
-            if (child.isDirectory()) {
-                schemaFiles.addAll(Arrays.asList(child.listFiles(config.getFileFilter())));
-                schemaFiles.remove(child);
-            } else {
-                i += 1;
-            }
-        }
+  /**
+   * resolveRecursive.
+   * @param config GenerationConfig
+   * @param schemaFiles List of schemaFiles
+   */
+  public static void resolveRecursive(GenerationConfig config, List<File> schemaFiles) {
 
+    Preconditions.checkArgument(schemaFiles.size() > 0);
+    int index = 0;
+    while ( schemaFiles.size() > index) {
+      File child = schemaFiles.get(index);
+      if (child.isDirectory()) {
+        schemaFiles.addAll(Arrays.asList(child.listFiles(config.getFileFilter())));
+        schemaFiles.remove(child);
+      } else {
+        index += 1;
+      }
     }
+
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-streams/blob/5dffd5c3/streams-util/src/main/java/org/apache/streams/util/schema/GenerationConfig.java
----------------------------------------------------------------------
diff --git a/streams-util/src/main/java/org/apache/streams/util/schema/GenerationConfig.java b/streams-util/src/main/java/org/apache/streams/util/schema/GenerationConfig.java
index c48d186..d7fa7e7 100644
--- a/streams-util/src/main/java/org/apache/streams/util/schema/GenerationConfig.java
+++ b/streams-util/src/main/java/org/apache/streams/util/schema/GenerationConfig.java
@@ -15,6 +15,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.apache.streams.util.schema;
 
 import java.io.File;
@@ -24,110 +25,39 @@ import java.util.Iterator;
 
 /**
  * GenerationConfig represents the common fields and field accessors for
- * streams modules that transform schemas into generated-sources or generated-resources
+ * streams modules that transform schemas into generated-sources or generated-resources.
  */
 public interface GenerationConfig {
 
-    /**
-     * Gets the 'source' configuration option.
-     *
-     * @return The source file(s) or directory(ies) from which JSON Schema will
-     *         be read.
-     */
-    Iterator<URL> getSource();
-
-    /**
-     * Gets the 'targetDirectory' configuration option.
-     *
-     * @return The target directory into which generated types will be written
-     *         (may or may not exist before types are written)
-     */
-    File getTargetDirectory();
-
-    /**
-     * Gets the 'outputEncoding' configuration option.
-     *
-     * @return The character encoding that should be used when writing output files.
-     */
-    String getOutputEncoding();
-
-    /**
-     * Gets the file filter used to isolate the schema mapping files in the
-     * source directories.
-     *
-     * @return the file filter use when scanning for schema files.
-     */
-    FileFilter getFileFilter();
-
-    /**
-     * Gets the 'includeAdditionalProperties' configuration option.
-     *
-     * @return Whether to allow 'additional properties' support in objects.
-     *         Setting this to false will disable additional properties support,
-     *         regardless of the input schema(s).
-     */
-//    boolean isIncludeAdditionalProperties();
-
-    /**
-     * Gets the 'targetVersion' configuration option.
-     *
-     *  @return The target version for generated source files.
-     */
-//    String getTargetVersion();
-
-//    /**
-//     * Gets the `includeDynamicAccessors` configuraiton option.
-//     *
-//     * @return Whether to include dynamic getters, setters, and builders
-//     *         or to omit these methods.
-//     */
-//    boolean isIncludeDynamicAccessors();
-
-//    /**
-//     * Gets the `dateTimeType` configuration option.
-//     *         <p>
-//     *         Example values:
-//     *         <ul>
-//     *         <li><code>org.joda.time.LocalDateTime</code> (Joda)</li>
-//     *         <li><code>java.time.LocalDateTime</code> (JSR310)</li>
-//     *         <li><code>null</code> (default behavior)</li>
-//     *         </ul>
-//     *
-//     * @return The java type to use instead of {@link java.util.Date}
-//     *         when adding date type fields to generate Java types.
-//     */
-//    String getDateTimeType();
-//
-//    /**
-//     * Gets the `dateType` configuration option.
-//     *         <p>
-//     *         Example values:
-//     *         <ul>
-//     *         <li><code>org.joda.time.LocalDate</code> (Joda)</li>
-//     *         <li><code>java.time.LocalDate</code> (JSR310)</li>
-//     *         <li><code>null</code> (default behavior)</li>
-//     *         </ul>
-//     *
-//     * @return The java type to use instead of string
-//     *         when adding string type fields with a format of date (not
-//     *         date-time) to generated Java types.
-//     */
-//    String getDateType();
-//
-//    /**
-//     * Gets the `timeType` configuration option.
-//     *         <p>
-//     *         Example values:
-//     *         <ul>
-//     *         <li><code>org.joda.time.LocalTime</code> (Joda)</li>
-//     *         <li><code>java.time.LocalTime</code> (JSR310)</li>
-//     *         <li><code>null</code> (default behavior)</li>
-//     *         </ul>
-//     *
-//     * @return The java type to use instead of string
-//     *         when adding string type fields with a format of time (not
-//     *         date-time) to generated Java types.
-//     */
-//    String getTimeType();
+  /**
+   * Gets the 'source' configuration option.
+   *
+   * @return The source file(s) or directory(ies) from which JSON Schema will
+   *         be read.
+   */
+  Iterator<URL> getSource();
+
+  /**
+   * Gets the 'targetDirectory' configuration option.
+   *
+   * @return The target directory into which generated types will be written
+   *         (may or may not exist before types are written)
+   */
+  File getTargetDirectory();
+
+  /**
+   * Gets the 'outputEncoding' configuration option.
+   *
+   * @return The character encoding that should be used when writing output files.
+   */
+  String getOutputEncoding();
+
+  /**
+   * Gets the file filter used to isolate the schema mapping files in the
+   * source directories.
+   *
+   * @return the file filter use when scanning for schema files.
+   */
+  FileFilter getFileFilter();
 
 }