Posted to commits@apex.apache.org by vr...@apache.org on 2015/09/24 04:37:20 UTC

[01/50] [abbrv] incubator-apex-core git commit: Merge branch 'gaurav-async-checkpoint' into devel-3

Repository: incubator-apex-core
Updated Branches:
  refs/heads/feature-module 9116c7033 -> 507fac34b


Merge branch 'gaurav-async-checkpoint' into devel-3


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/09f716e0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/09f716e0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/09f716e0

Branch: refs/heads/feature-module
Commit: 09f716e004ef5b8cb990d94f8a8a10f65c6f4f0b
Parents: e512610 243d5af
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Wed Sep 9 00:54:32 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Wed Sep 9 00:54:32 2015 -0700

----------------------------------------------------------------------
 .../common/util/AsyncFSStorageAgent.java        |  8 ++++-
 .../java/com/datatorrent/stram/engine/Node.java | 31 ++++++++++----------
 .../stram/plan/physical/PhysicalPlan.java       |  5 +++-
 3 files changed, 27 insertions(+), 17 deletions(-)
----------------------------------------------------------------------



[42/50] [abbrv] incubator-apex-core git commit: Merge branch 'maven-enforcer-plugin' of https://github.com/vrozov/incubator-apex-core into devel-3

Posted by vr...@apache.org.
Merge branch 'maven-enforcer-plugin' of https://github.com/vrozov/incubator-apex-core into devel-3


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/041beb9f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/041beb9f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/041beb9f

Branch: refs/heads/feature-module
Commit: 041beb9fb5e38dc628db9b9a7cfaed8027824e0d
Parents: 454fecc b718783
Author: MalharJenkins <je...@datatorrent.com>
Authored: Wed Sep 16 22:11:43 2015 -0700
Committer: MalharJenkins <je...@datatorrent.com>
Committed: Wed Sep 16 22:11:43 2015 -0700

----------------------------------------------------------------------
 pom.xml | 72 +++++++++++++++++++++++++++++++++++++++++-------------------
 1 file changed, 49 insertions(+), 23 deletions(-)
----------------------------------------------------------------------
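
The pom.xml hunk for this merge is not reproduced in this part of the digest; only the diffstat above is shown. For readers unfamiliar with the plugin named in the branch, the sketch below shows what a typical maven-enforcer-plugin declaration in a root pom.xml looks like. The plugin coordinates and the enforce goal are standard Maven usage; the execution id, rules, and version ranges are illustrative assumptions only and are not taken from this commit.

  <plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-enforcer-plugin</artifactId>
    <executions>
      <execution>
        <!-- hypothetical execution id -->
        <id>enforce-build-environment</id>
        <goals>
          <goal>enforce</goal>
        </goals>
        <configuration>
          <rules>
            <!-- fail the build on an unsupported Maven or JDK; versions below are examples only -->
            <requireMavenVersion>
              <version>[3.0.5,)</version>
            </requireMavenVersion>
            <requireJavaVersion>
              <version>[1.7,)</version>
            </requireJavaVersion>
          </rules>
        </configuration>
      </execution>
    </executions>
  </plugin>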



[34/50] [abbrv] incubator-apex-core git commit: Merge branch 'APEX-122' of github.com:vrozov/incubator-apex-core into vlad-no-snapshot

Posted by vr...@apache.org.
Merge branch 'APEX-122' of github.com:vrozov/incubator-apex-core into vlad-no-snapshot


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/7503dde5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/7503dde5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/7503dde5

Branch: refs/heads/feature-module
Commit: 7503dde513489c74bad44c4e81e04bbafcfdfaff
Parents: cecdf4c 8578a71
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Tue Sep 15 14:00:52 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Tue Sep 15 14:00:52 2015 -0700

----------------------------------------------------------------------
 api/pom.xml | 2 +-
 pom.xml     | 6 +++++-
 2 files changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------



[27/50] [abbrv] incubator-apex-core git commit: Fix version.

Posted by vr...@apache.org.
Fix version.


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/c0baa9d9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/c0baa9d9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/c0baa9d9

Branch: refs/heads/feature-module
Commit: c0baa9d9bda1ffabba0bbc24f5dcb14b741b9eb4
Parents: 91e63b5
Author: Thomas Weise <th...@datatorrent.com>
Authored: Mon Sep 14 15:42:35 2015 -0700
Committer: Thomas Weise <th...@datatorrent.com>
Committed: Mon Sep 14 15:42:35 2015 -0700

----------------------------------------------------------------------
 bufferserver/pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/c0baa9d9/bufferserver/pom.xml
----------------------------------------------------------------------
diff --git a/bufferserver/pom.xml b/bufferserver/pom.xml
index b09db32..ffd6ca3 100644
--- a/bufferserver/pom.xml
+++ b/bufferserver/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>${project.version}</version>
+    <version>3.2.0-SNAPSHOT</version>
   </parent>
 
   <artifactId>dt-bufferserver</artifactId>
@@ -28,7 +28,7 @@
     <dependency>
       <groupId>com.datatorrent</groupId>
       <artifactId>dt-common</artifactId>
-      <version>3.2.0-SNAPSHOT</version>
+      <version>${project.version}</version>
       <type>jar</type>
     </dependency>
   </dependencies>
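
The swap above follows the usual Maven convention: the version in a module's <parent> block generally needs to be a concrete value, because ${project.version} is supplied by the parent itself and cannot be reliably interpolated at that point, while a dependency on a sibling module in the same reactor should use ${project.version} so it always tracks the current build. A condensed view of the corrected bufferserver/pom.xml, assembled only from the elements shown in the hunks above (the comments state the likely rationale, not text from the commit):

  <parent>
    <groupId>com.datatorrent</groupId>
    <artifactId>dt-framework</artifactId>
    <!-- concrete version: the parent reference is resolved before the child's own version exists -->
    <version>3.2.0-SNAPSHOT</version>
  </parent>

  <artifactId>dt-bufferserver</artifactId>

  <dependencies>
    <dependency>
      <groupId>com.datatorrent</groupId>
      <artifactId>dt-common</artifactId>
      <!-- sibling module: track the reactor version instead of hard-coding it -->
      <version>${project.version}</version>
      <type>jar</type>
    </dependency>
  </dependencies>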


[22/50] [abbrv] incubator-apex-core git commit: Merge branch 'APEX-121' of github.com:gauravgopi123/incubator-apex-core into devel-3

Posted by vr...@apache.org.
Merge branch 'APEX-121' of github.com:gauravgopi123/incubator-apex-core into devel-3


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/efaa8f28
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/efaa8f28
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/efaa8f28

Branch: refs/heads/feature-module
Commit: efaa8f282e2404344f95cb0f5ce9d401e28391de
Parents: 760039e 89aefea
Author: Pramod Immaneni <pr...@datatorrent.com>
Authored: Mon Sep 14 10:10:34 2015 -0700
Committer: Pramod Immaneni <pr...@datatorrent.com>
Committed: Mon Sep 14 10:10:34 2015 -0700

----------------------------------------------------------------------
 .../com/datatorrent/stram/stream/BufferServerPublisher.java | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)
----------------------------------------------------------------------



[38/50] [abbrv] incubator-apex-core git commit: APEX-28 #resolve

Posted by vr...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/977093e1/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java b/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java
deleted file mode 100644
index 218156b..0000000
--- a/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java
+++ /dev/null
@@ -1,1788 +0,0 @@
-/**
- * Copyright (C) 2015 DataTorrent, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *         http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datatorrent.stram.plan;
-
-import com.datatorrent.api.*;
-import com.datatorrent.api.Attribute.AttributeMap.AttributeInitializer;
-import com.datatorrent.api.Context.DAGContext;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.Context.PortContext;
-import com.datatorrent.api.StringCodec.Integer2String;
-import com.datatorrent.api.annotation.ApplicationAnnotation;
-import com.datatorrent.common.codec.JsonStreamCodec;
-import com.datatorrent.common.util.BasicContainerOptConfigurator;
-import com.datatorrent.common.util.FSStorageAgent;
-import com.datatorrent.stram.PartitioningTest.PartitionLoadWatch;
-import com.datatorrent.stram.client.StramClientUtils;
-import com.datatorrent.stram.engine.GenericTestOperator;
-import com.datatorrent.stram.engine.TestGeneratorInputOperator;
-import com.datatorrent.stram.plan.LogicalPlanTest.ValidationTestOperator;
-import com.datatorrent.stram.plan.logical.LogicalPlan;
-import com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta;
-import com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta;
-import com.datatorrent.stram.plan.logical.LogicalPlan.OutputPortMeta;
-import com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta;
-import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration;
-import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration.AttributeParseUtils;
-import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration.ConfElement;
-import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration.ContextUtils;
-import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration.StramElement;
-import com.datatorrent.stram.plan.logical.MockStorageAgent;
-import com.datatorrent.stram.support.StramTestSupport.RegexMatcher;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.StringWriter;
-import java.lang.reflect.Field;
-import javax.validation.ValidationException;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.mutable.MutableBoolean;
-import org.apache.hadoop.conf.Configuration;
-import org.codehaus.jettison.json.JSONObject;
-import org.junit.Assert;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.*;
-
-import static org.junit.Assert.*;
-
-
-
-public class LogicalPlanConfigurationTest {
-
-  private static OperatorMeta assertNode(LogicalPlan dag, String id) {
-      OperatorMeta n = dag.getOperatorMeta(id);
-      assertNotNull("operator exists id=" + id, n);
-      return n;
-  }
-
-  /**
-   * Test read from dt-site.xml in Hadoop configuration format.
-   */
-  @Test
-  public void testLoadFromConfigXml() {
-    Configuration conf = new Configuration(false);
-    conf.addResource(StramClientUtils.DT_SITE_XML_FILE);
-
-    LogicalPlanConfiguration builder = new LogicalPlanConfiguration(conf);
-
-    LogicalPlan dag = new LogicalPlan();
-    builder.populateDAG(dag);
-    dag.validate();
-
-    assertEquals("number of operator confs", 6, dag.getAllOperators().size());
-
-    OperatorMeta operator1 = assertNode(dag, "operator1");
-    OperatorMeta operator2 = assertNode(dag, "operator2");
-    OperatorMeta operator3 = assertNode(dag, "operator3");
-    OperatorMeta operator4 = assertNode(dag, "operator4");
-
-    assertNotNull("operatorConf for root", operator1);
-    assertEquals("operatorId set", "operator1", operator1.getName());
-
-    // verify operator instantiation
-    assertEquals(operator1.getOperator().getClass(), TestGeneratorInputOperator.class);
-    TestGeneratorInputOperator GenericTestNode = (TestGeneratorInputOperator)operator1.getOperator();
-    assertEquals("myStringPropertyValue", GenericTestNode.getMyStringProperty());
-
-    // check links
-    assertEquals("operator1 inputs", 0, operator1.getInputStreams().size());
-    assertEquals("operator1 outputs", 1, operator1.getOutputStreams().size());
-    StreamMeta n1n2 = operator2.getInputStreams().get(operator2.getMeta(((GenericTestOperator)operator2.getOperator()).inport1));
-    assertNotNull("n1n2", n1n2);
-
-    // output/input stream object same
-    assertEquals("rootNode out is operator2 in", n1n2, operator1.getOutputStreams().get(operator1.getMeta(((TestGeneratorInputOperator)operator1.getOperator()).outport)));
-    assertEquals("n1n2 source", operator1, n1n2.getSource().getOperatorMeta());
-    Assert.assertEquals("n1n2 targets", 1, n1n2.getSinks().size());
-    Assert.assertEquals("n1n2 target", operator2, n1n2.getSinks().get(0).getOperatorWrapper());
-
-    assertEquals("stream name", "n1n2", n1n2.getName());
-    Assert.assertEquals("n1n2 not inline (default)", null, n1n2.getLocality());
-
-    // operator 2 streams to operator 3 and operator 4
-    assertEquals("operator 2 number of outputs", 1, operator2.getOutputStreams().size());
-    StreamMeta fromNode2 = operator2.getOutputStreams().values().iterator().next();
-
-    Set<OperatorMeta> targetNodes = Sets.newHashSet();
-    for (LogicalPlan.InputPortMeta ip : fromNode2.getSinks()) {
-      targetNodes.add(ip.getOperatorWrapper());
-    }
-    Assert.assertEquals("outputs " + fromNode2, Sets.newHashSet(operator3, operator4), targetNodes);
-
-    OperatorMeta operator6 = assertNode(dag, "operator6");
-
-    List<OperatorMeta> rootNodes = dag.getRootOperators();
-    assertEquals("number root operators", 2, rootNodes.size());
-    assertTrue("root operator2", rootNodes.contains(operator1));
-    assertTrue("root operator6", rootNodes.contains(operator6));
-
-    for (OperatorMeta n : rootNodes) {
-      printTopology(n, dag, 0);
-    }
-
-  }
-
-  private void printTopology(OperatorMeta operator, DAG tplg, int level) {
-      String prefix = "";
-      if (level > 0) {
-        prefix = StringUtils.repeat(" ", 20*(level-1)) + "   |" + StringUtils.repeat("-", 17);
-      }
-      logger.debug(prefix  + operator.getName());
-      for (StreamMeta downStream : operator.getOutputStreams().values()) {
-          if (!downStream.getSinks().isEmpty()) {
-            for (LogicalPlan.InputPortMeta targetNode : downStream.getSinks()) {
-              printTopology(targetNode.getOperatorWrapper(), tplg, level+1);
-            }
-          }
-      }
-  }
-
-  @Test
-  public void testLoadFromPropertiesFile() throws IOException
-  {
-      Properties props = new Properties();
-      String resourcePath = "/testTopology.properties";
-      InputStream is = this.getClass().getResourceAsStream(resourcePath);
-      if (is == null) {
-        fail("Could not load " + resourcePath);
-      }
-      props.load(is);
-      LogicalPlanConfiguration pb = new LogicalPlanConfiguration(new Configuration(false))
-            .addFromProperties(props, null);
-
-      LogicalPlan dag = new LogicalPlan();
-      pb.populateDAG(dag);
-      dag.validate();
-
-      assertEquals("number of operator confs", 5, dag.getAllOperators().size());
-      assertEquals("number of root operators", 1, dag.getRootOperators().size());
-
-      StreamMeta s1 = dag.getStream("n1n2");
-      assertNotNull(s1);
-      assertTrue("n1n2 inline", DAG.Locality.CONTAINER_LOCAL == s1.getLocality());
-
-      OperatorMeta operator3 = dag.getOperatorMeta("operator3");
-      assertEquals("operator3.classname", GenericTestOperator.class, operator3.getOperator().getClass());
-
-      GenericTestOperator doperator3 = (GenericTestOperator)operator3.getOperator();
-      assertEquals("myStringProperty " + doperator3, "myStringPropertyValueFromTemplate", doperator3.getMyStringProperty());
-      assertFalse("booleanProperty " + doperator3, doperator3.booleanProperty);
-
-      OperatorMeta operator4 = dag.getOperatorMeta("operator4");
-      GenericTestOperator doperator4 = (GenericTestOperator)operator4.getOperator();
-      assertEquals("myStringProperty " + doperator4, "overrideOperator4", doperator4.getMyStringProperty());
-      assertEquals("setterOnlyOperator4 " + doperator4, "setterOnlyOperator4", doperator4.propertySetterOnly);
-      assertTrue("booleanProperty " + doperator4, doperator4.booleanProperty);
-
-      StreamMeta input1 = dag.getStream("inputStream");
-      assertNotNull(input1);
-      Assert.assertEquals("input1 source", dag.getOperatorMeta("inputOperator"), input1.getSource().getOperatorMeta());
-      Set<OperatorMeta> targetNodes = Sets.newHashSet();
-      for (LogicalPlan.InputPortMeta targetPort : input1.getSinks()) {
-        targetNodes.add(targetPort.getOperatorWrapper());
-      }
-
-      Assert.assertEquals("input1 target ", Sets.newHashSet(dag.getOperatorMeta("operator1"), operator3, operator4), targetNodes);
-
-  }
-
-  @Test
-  public void testLoadFromJson() throws Exception
-  {
-    String resourcePath = "/testTopology.json";
-    InputStream is = this.getClass().getResourceAsStream(resourcePath);
-    if (is == null) {
-      fail("Could not load " + resourcePath);
-    }
-    StringWriter writer = new StringWriter();
-
-    IOUtils.copy(is, writer);
-    JSONObject json = new JSONObject(writer.toString());
-
-    Configuration conf = new Configuration(false);
-    conf.set(StreamingApplication.DT_PREFIX + "operator.operator3.prop.myStringProperty", "o3StringFromConf");
-
-    LogicalPlanConfiguration planConf = new LogicalPlanConfiguration(conf);
-    LogicalPlan dag = planConf.createFromJson(json, "testLoadFromJson");
-    dag.validate();
-
-    assertEquals("DAG attribute CONTAINER_JVM_OPTIONS ", dag.getAttributes().get(DAGContext.CONTAINER_JVM_OPTIONS), "-Xmx16m");
-    Map<Class<?>, Class<? extends StringCodec<?>>> stringCodecsMap = Maps.newHashMap();
-    stringCodecsMap.put(Integer.class, Integer2String.class);
-    assertEquals("DAG attribute STRING_CODECS ", stringCodecsMap, dag.getAttributes().get(DAGContext.STRING_CODECS));
-    assertEquals("DAG attribute CONTAINER_OPTS_CONFIGURATOR ", BasicContainerOptConfigurator.class, dag.getAttributes().get(DAGContext.CONTAINER_OPTS_CONFIGURATOR).getClass());
-
-    assertEquals("number of operator confs", 5, dag.getAllOperators().size());
-    assertEquals("number of root operators", 1, dag.getRootOperators().size());
-
-    StreamMeta s1 = dag.getStream("n1n2");
-    assertNotNull(s1);
-    assertTrue("n1n2 inline", DAG.Locality.CONTAINER_LOCAL == s1.getLocality());
-
-    OperatorMeta input = dag.getOperatorMeta("inputOperator");
-    TestStatsListener tsl = new TestStatsListener();
-    tsl.setIntProp(222);
-    List<StatsListener> sll = Lists.<StatsListener>newArrayList(tsl);
-    assertEquals("inputOperator STATS_LISTENERS attribute ", sll, input.getAttributes().get(OperatorContext.STATS_LISTENERS));
-    for(OutputPortMeta opm : input.getOutputStreams().keySet()){
-      assertTrue("output port of input Operator attribute is JsonStreamCodec ", opm.getAttributes().get(PortContext.STREAM_CODEC) instanceof JsonStreamCodec<?>);
-    }
-
-    OperatorMeta operator3 = dag.getOperatorMeta("operator3");
-    assertEquals("operator3.classname", GenericTestOperator.class, operator3.getOperator().getClass());
-
-    GenericTestOperator doperator3 = (GenericTestOperator)operator3.getOperator();
-    assertEquals("myStringProperty " + doperator3, "o3StringFromConf", doperator3.getMyStringProperty());
-    assertFalse("booleanProperty " + doperator3, doperator3.booleanProperty);
-
-    OperatorMeta operator4 = dag.getOperatorMeta("operator4");
-    GenericTestOperator doperator4 = (GenericTestOperator)operator4.getOperator();
-    assertEquals("myStringProperty " + doperator4, "overrideOperator4", doperator4.getMyStringProperty());
-    assertEquals("setterOnlyOperator4 " + doperator4, "setterOnlyOperator4", doperator4.propertySetterOnly);
-    assertTrue("booleanProperty " + doperator4, doperator4.booleanProperty);
-
-    StreamMeta input1 = dag.getStream("inputStream");
-    assertNotNull(input1);
-    OperatorMeta inputOperator = dag.getOperatorMeta("inputOperator");
-    Assert.assertEquals("input1 source", inputOperator, input1.getSource().getOperatorMeta());
-    Set<OperatorMeta> targetNodes = Sets.newHashSet();
-    for (LogicalPlan.InputPortMeta targetPort : input1.getSinks()) {
-      targetNodes.add(targetPort.getOperatorWrapper());
-    }
-    Assert.assertEquals("operator attribute " + inputOperator, 64, (int)inputOperator.getValue(OperatorContext.MEMORY_MB));
-    Assert.assertEquals("port attribute " + inputOperator, 8, (int)input1.getSource().getValue(PortContext.UNIFIER_LIMIT));
-    Assert.assertEquals("input1 target ", Sets.newHashSet(dag.getOperatorMeta("operator1"), operator3, operator4), targetNodes);
-  }
-
-  @Test
-  @SuppressWarnings("UnnecessaryBoxing")
-  public void testAppLevelAttributes()
-  {
-    String appName = "app1";
-
-    Properties props = new Properties();
-    props.put(StreamingApplication.DT_PREFIX + DAG.MASTER_MEMORY_MB.getName(), "123");
-    props.put(StreamingApplication.DT_PREFIX + DAG.CONTAINER_JVM_OPTIONS.getName(), "-Dlog4j.properties=custom_log4j.properties");
-    props.put(StreamingApplication.DT_PREFIX + DAG.APPLICATION_PATH.getName(), "/defaultdir");
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + "." + DAG.APPLICATION_PATH.getName(), "/otherdir");
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + "." + DAG.STREAMING_WINDOW_SIZE_MILLIS.getName(), "1000");
-
-    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
-    dagBuilder.addFromProperties(props, null);
-
-    LogicalPlan dag = new LogicalPlan();
-
-    dagBuilder.populateDAG(dag);
-
-    dagBuilder.setApplicationConfiguration(dag, appName, null);
-
-    Assert.assertEquals("", "/otherdir", dag.getValue(DAG.APPLICATION_PATH));
-    Assert.assertEquals("", Integer.valueOf(123), dag.getValue(DAG.MASTER_MEMORY_MB));
-    Assert.assertEquals("", Integer.valueOf(1000), dag.getValue(DAG.STREAMING_WINDOW_SIZE_MILLIS));
-    Assert.assertEquals("", "-Dlog4j.properties=custom_log4j.properties", dag.getValue(DAG.CONTAINER_JVM_OPTIONS));
-
-  }
-  @Test
-  @SuppressWarnings("UnnecessaryBoxing")
-  public void testAppLevelProperties() {
-	  String appName ="app1";
-	  Properties props =new Properties();
-	  props.put(StreamingApplication.DT_PREFIX + "application."+appName+".testprop1","10");
-	  props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".prop.testprop2", "100");
-	  props.put(StreamingApplication.DT_PREFIX + "application.*.prop.testprop3","1000");
-	  props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".inncls.a", "10000");
-	  LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
-    dagBuilder.addFromProperties(props, null);
-
-	  LogicalPlan dag = new LogicalPlan();
-	  TestApplication app1Test=new TestApplication();
-
-	  dagBuilder.setApplicationConfiguration(dag, appName,app1Test);
-	  Assert.assertEquals("",Integer.valueOf(10),app1Test.getTestprop1());
-	  Assert.assertEquals("",Integer.valueOf(100),app1Test.getTestprop2());
-	  Assert.assertEquals("",Integer.valueOf(1000),app1Test.getTestprop3());
-	  Assert.assertEquals("",Integer.valueOf(10000),app1Test.getInncls().getA());
-  }
-
-  @Test
-  public void testPrepareDAG() {
-    final MutableBoolean appInitialized = new MutableBoolean(false);
-    StreamingApplication app = new StreamingApplication() {
-      @Override
-      public void populateDAG(DAG dag, Configuration conf)
-      {
-        Assert.assertEquals("", "hostname:9090", dag.getValue(DAG.GATEWAY_CONNECT_ADDRESS));
-        dag.setAttribute(DAG.GATEWAY_CONNECT_ADDRESS, "hostname:9091");
-        appInitialized.setValue(true);
-      }
-    };
-    Configuration conf = new Configuration(false);
-    conf.addResource(StramClientUtils.DT_SITE_XML_FILE);
-    LogicalPlanConfiguration pb = new LogicalPlanConfiguration(conf);
-
-    LogicalPlan dag = new LogicalPlan();
-    pb.prepareDAG(dag, app, "testconfig");
-
-    Assert.assertTrue("populateDAG called", appInitialized.booleanValue());
-    Assert.assertEquals("populateDAG overrides attribute", "hostname:9091", dag.getValue(DAG.GATEWAY_CONNECT_ADDRESS));
-  }
-
-  @Test
-  public void testOperatorConfigurationLookup() {
-
-    Properties props = new Properties();
-
-    // match operator by name
-    props.put(StreamingApplication.DT_PREFIX + "template.matchId1.matchIdRegExp", ".*operator1.*");
-    props.put(StreamingApplication.DT_PREFIX + "template.matchId1.stringProperty2", "stringProperty2Value-matchId1");
-    props.put(StreamingApplication.DT_PREFIX + "template.matchId1.nested.property", "nested.propertyValue-matchId1");
-
-    // match class name, lower priority
-    props.put(StreamingApplication.DT_PREFIX + "template.matchClass1.matchClassNameRegExp", ".*" + ValidationTestOperator.class.getSimpleName());
-    props.put(StreamingApplication.DT_PREFIX + "template.matchClass1.stringProperty2", "stringProperty2Value-matchClass1");
-
-    // match class name
-    props.put(StreamingApplication.DT_PREFIX + "template.t2.matchClassNameRegExp", ".*"+GenericTestOperator.class.getSimpleName());
-    props.put(StreamingApplication.DT_PREFIX + "template.t2.myStringProperty", "myStringPropertyValue");
-
-    // direct setting
-    props.put(StreamingApplication.DT_PREFIX + "operator.operator3.emitFormat", "emitFormatValue");
-
-    LogicalPlan dag = new LogicalPlan();
-    Operator operator1 = dag.addOperator("operator1", new ValidationTestOperator());
-    Operator operator2 = dag.addOperator("operator2", new ValidationTestOperator());
-    Operator operator3 = dag.addOperator("operator3", new GenericTestOperator());
-
-    LogicalPlanConfiguration pb = new LogicalPlanConfiguration(new Configuration(false));
-    LOG.debug("calling addFromProperties");
-    pb.addFromProperties(props, null);
-
-    Map<String, String> configProps = pb.getProperties(dag.getMeta(operator1), "appName");
-    Assert.assertEquals("" + configProps, 2, configProps.size());
-    Assert.assertEquals("" + configProps, "stringProperty2Value-matchId1", configProps.get("stringProperty2"));
-    Assert.assertEquals("" + configProps, "nested.propertyValue-matchId1", configProps.get("nested.property"));
-
-    configProps = pb.getProperties(dag.getMeta(operator2), "appName");
-    Assert.assertEquals("" + configProps, 1, configProps.size());
-    Assert.assertEquals("" + configProps, "stringProperty2Value-matchClass1", configProps.get("stringProperty2"));
-
-    configProps = pb.getProperties(dag.getMeta(operator3), "appName");
-    Assert.assertEquals("" + configProps, 2, configProps.size());
-    Assert.assertEquals("" + configProps, "myStringPropertyValue", configProps.get("myStringProperty"));
-    Assert.assertEquals("" + configProps, "emitFormatValue", configProps.get("emitFormat"));
-
-  }
-
-  @Test
-  public void testSetOperatorProperties() {
-
-    Configuration conf = new Configuration(false);
-    conf.set(StreamingApplication.DT_PREFIX + "operator.o1.prop.myStringProperty", "myStringPropertyValue");
-    conf.set(StreamingApplication.DT_PREFIX + "operator.o2.prop.stringArrayField", "a,b,c");
-    conf.set(StreamingApplication.DT_PREFIX + "operator.o2.prop.mapProperty.key1", "key1Val");
-    conf.set(StreamingApplication.DT_PREFIX + "operator.o2.prop.mapProperty(key1.dot)", "key1dotVal");
-    conf.set(StreamingApplication.DT_PREFIX + "operator.o2.prop.mapProperty(key2.dot)", "key2dotVal");
-
-    LogicalPlan dag = new LogicalPlan();
-    GenericTestOperator o1 = dag.addOperator("o1", new GenericTestOperator());
-    ValidationTestOperator o2 = dag.addOperator("o2", new ValidationTestOperator());
-
-    LogicalPlanConfiguration pb = new LogicalPlanConfiguration(conf);
-
-    pb.setOperatorProperties(dag, "testSetOperatorProperties");
-    Assert.assertEquals("o1.myStringProperty", "myStringPropertyValue", o1.getMyStringProperty());
-    Assert.assertArrayEquals("o2.stringArrayField", new String[] {"a", "b", "c"}, o2.getStringArrayField());
-
-    Assert.assertEquals("o2.mapProperty.key1", "key1Val", o2.getMapProperty().get("key1"));
-    Assert.assertEquals("o2.mapProperty(key1.dot)", "key1dotVal", o2.getMapProperty().get("key1.dot"));
-    Assert.assertEquals("o2.mapProperty(key2.dot)", "key2dotVal", o2.getMapProperty().get("key2.dot"));
-
-  }
-
-  @ApplicationAnnotation(name="AnnotatedAlias")
-  class AnnotatedApplication implements StreamingApplication {
-
-    @Override
-    public void populateDAG(DAG dag, Configuration conf)
-    {
-    }
-
-  }
-
-  @Test
-  public void testAppNameAttribute() {
-    StreamingApplication app = new AnnotatedApplication();
-    Configuration conf = new Configuration(false);
-    conf.addResource(StramClientUtils.DT_SITE_XML_FILE);
-
-    LogicalPlanConfiguration builder = new LogicalPlanConfiguration(conf);
-
-    Properties properties = new Properties();
-    properties.put(StreamingApplication.DT_PREFIX + "application.TestAliasApp.class", app.getClass().getName());
-
-    builder.addFromProperties(properties, null);
-
-    LogicalPlan dag = new LogicalPlan();
-    String appPath = app.getClass().getName().replace(".", "/") + ".class";
-    dag.setAttribute(com.datatorrent.api.Context.DAGContext.APPLICATION_NAME, "testApp");
-    builder.prepareDAG(dag, app, appPath);
-
-    Assert.assertEquals("Application name", "testApp", dag.getAttributes().get(com.datatorrent.api.Context.DAGContext.APPLICATION_NAME));
-  }
-
-  @Test
-  public void testAppAlias() {
-    StreamingApplication app = new AnnotatedApplication();
-    Configuration conf = new Configuration(false);
-    conf.addResource(StramClientUtils.DT_SITE_XML_FILE);
-
-    LogicalPlanConfiguration builder = new LogicalPlanConfiguration(conf);
-
-    Properties properties = new Properties();
-    properties.put(StreamingApplication.DT_PREFIX + "application.TestAliasApp.class", app.getClass().getName());
-
-    builder.addFromProperties(properties, null);
-
-    LogicalPlan dag = new LogicalPlan();
-    String appPath = app.getClass().getName().replace(".", "/") + ".class";
-    builder.prepareDAG(dag, app, appPath);
-
-    Assert.assertEquals("Application name", "TestAliasApp", dag.getAttributes().get(com.datatorrent.api.Context.DAGContext.APPLICATION_NAME));
-  }
-
-
-  @Test
-  public void testAppAnnotationAlias() {
-    StreamingApplication app = new AnnotatedApplication();
-    Configuration conf = new Configuration(false);
-    conf.addResource(StramClientUtils.DT_SITE_XML_FILE);
-
-    LogicalPlanConfiguration builder = new LogicalPlanConfiguration(conf);
-
-    LogicalPlan dag = new LogicalPlan();
-    String appPath = app.getClass().getName().replace(".", "/") + ".class";
-    builder.prepareDAG(dag, app, appPath);
-
-    Assert.assertEquals("Application name", "AnnotatedAlias", dag.getAttributes().get(com.datatorrent.api.Context.DAGContext.APPLICATION_NAME));
-  }
-
-  @Test
-  @SuppressWarnings( {"UnnecessaryBoxing", "AssertEqualsBetweenInconvertibleTypes"})
-  public void testOperatorLevelAttributes() {
-    String appName = "app1";
-    StreamingApplication app = new StreamingApplication() {
-      @Override
-      public void populateDAG(DAG dag, Configuration conf)
-      {
-        dag.addOperator("operator1", GenericTestOperator.class);
-        dag.addOperator("operator2", GenericTestOperator.class);
-      }
-    };
-
-    Properties props = new Properties();
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".class", app.getClass().getName());
-    props.put(StreamingApplication.DT_PREFIX + "operator.*." + OperatorContext.APPLICATION_WINDOW_COUNT.getName(), "2");
-    props.put(StreamingApplication.DT_PREFIX + "operator.*." + OperatorContext.STATS_LISTENERS.getName(), PartitionLoadWatch.class.getName());
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator1." + OperatorContext.APPLICATION_WINDOW_COUNT.getName(), "20");
-
-    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
-    dagBuilder.addFromProperties(props, null);
-
-    String appPath = app.getClass().getName().replace(".", "/") + ".class";
-
-    LogicalPlan dag = new LogicalPlan();
-    dagBuilder.prepareDAG(dag, app, appPath);
-
-    Assert.assertEquals("", Integer.valueOf(20), dag.getOperatorMeta("operator1").getValue(OperatorContext.APPLICATION_WINDOW_COUNT));
-    Assert.assertEquals("", Integer.valueOf(2), dag.getOperatorMeta("operator2").getValue(OperatorContext.APPLICATION_WINDOW_COUNT));
-    Assert.assertEquals("", PartitionLoadWatch.class, dag.getOperatorMeta("operator2").getValue(OperatorContext.STATS_LISTENERS).toArray()[0].getClass());
-  }
-
-  @Test
-  public void testOperatorLevelProperties() {
-    String appName = "app1";
-    final GenericTestOperator operator1 = new GenericTestOperator();
-    final GenericTestOperator operator2 = new GenericTestOperator();
-    StreamingApplication app = new StreamingApplication() {
-      @Override
-      public void populateDAG(DAG dag, Configuration conf)
-      {
-        dag.addOperator("operator1", operator1);
-        dag.addOperator("operator2", operator2);
-      }
-    };
-
-    Properties props = new Properties();
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".class", app.getClass().getName());
-    props.put(StreamingApplication.DT_PREFIX + "operator.*.myStringProperty", "pv1");
-    props.put(StreamingApplication.DT_PREFIX + "operator.*.booleanProperty", Boolean.TRUE.toString());
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator1.myStringProperty", "apv1");
-
-    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
-    dagBuilder.addFromProperties(props, null);
-
-    String appPath = app.getClass().getName().replace(".", "/") + ".class";
-
-    LogicalPlan dag = new LogicalPlan();
-    dagBuilder.prepareDAG(dag, app, appPath);
-
-    Assert.assertEquals("apv1", operator1.getMyStringProperty());
-    Assert.assertEquals("pv1", operator2.getMyStringProperty());
-    Assert.assertEquals(true, operator2.isBooleanProperty());
-  }
-
-  @Test
-  public void testApplicationLevelParameter()
-  {
-    String appName = "app1";
-    final GenericTestOperator operator1 = new GenericTestOperator();
-    final GenericTestOperator operator2 = new GenericTestOperator();
-    StreamingApplication app = new StreamingApplication()
-    {
-      @Override
-      public void populateDAG(DAG dag, Configuration conf)
-      {
-        dag.addOperator("operator1", operator1);
-        dag.addOperator("operator2", operator2);
-      }
-    };
-
-    Properties props = new Properties();
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".class", app.getClass().getName());
-    props.put(StreamingApplication.DT_PREFIX + "operator.*.myStringProperty", "foo ${xyz} bar ${zzz} baz");
-    props.put(StreamingApplication.DT_PREFIX + "operator.*.booleanProperty", Boolean.TRUE.toString());
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator1.myStringProperty", "apv1");
-
-    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
-
-    Configuration vars = new Configuration(false);
-    vars.set("xyz", "123");
-    vars.set("zzz", "456");
-    dagBuilder.addFromProperties(props, vars);
-
-    String appPath = app.getClass().getName().replace(".", "/") + ".class";
-
-    LogicalPlan dag = new LogicalPlan();
-    dagBuilder.prepareDAG(dag, app, appPath);
-
-    Assert.assertEquals("apv1", operator1.getMyStringProperty());
-    Assert.assertEquals("foo 123 bar 456 baz", operator2.getMyStringProperty());
-    Assert.assertEquals(true, operator2.isBooleanProperty());
-  }
-
-  @Test
-  @SuppressWarnings("UnnecessaryBoxing")
-  public void testPortLevelAttributes() {
-    String appName = "app1";
-    SimpleTestApplication app = new SimpleTestApplication();
-
-    Properties props = new Properties();
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".class", app.getClass().getName());
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator1.port.*." + PortContext.QUEUE_CAPACITY.getName(), "" + 16 * 1024);
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator2.inputport.inport1." + PortContext.QUEUE_CAPACITY.getName(), "" + 32 * 1024);
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator2.outputport.outport1." + PortContext.QUEUE_CAPACITY.getName(), "" + 32 * 1024);
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator3.port.*." + PortContext.QUEUE_CAPACITY.getName(), "" + 16 * 1024);
-    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator3.inputport.inport2." + PortContext.QUEUE_CAPACITY.getName(), "" + 32 * 1024);
-
-    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
-    dagBuilder.addFromProperties(props, null);
-
-    String appPath = app.getClass().getName().replace(".", "/") + ".class";
-
-    LogicalPlan dag = new LogicalPlan();
-    dagBuilder.prepareDAG(dag, app, appPath);
-
-    OperatorMeta om1 = dag.getOperatorMeta("operator1");
-    Assert.assertEquals("", Integer.valueOf(16 * 1024), om1.getMeta(app.gt1.outport1).getValue(PortContext.QUEUE_CAPACITY));
-    OperatorMeta om2 = dag.getOperatorMeta("operator2");
-    Assert.assertEquals("", Integer.valueOf(32 * 1024), om2.getMeta(app.gt2.inport1).getValue(PortContext.QUEUE_CAPACITY));
-    Assert.assertEquals("", Integer.valueOf(32 * 1024), om2.getMeta(app.gt2.outport1).getValue(PortContext.QUEUE_CAPACITY));
-    OperatorMeta om3 = dag.getOperatorMeta("operator3");
-    Assert.assertEquals("", Integer.valueOf(16 * 1024), om3.getMeta(app.gt3.inport1).getValue(PortContext.QUEUE_CAPACITY));
-    Assert.assertEquals("", Integer.valueOf(32 * 1024), om3.getMeta(app.gt3.inport2).getValue(PortContext.QUEUE_CAPACITY));
-  }
-
-
-  @Test
-  public void testInvalidAttribute() throws Exception {
-    Assert.assertNotSame(0, com.datatorrent.api.Context.DAGContext.serialVersionUID);
-    Attribute<String> attribute = new Attribute<>("", null);
-
-    Field nameField = Attribute.class.getDeclaredField("name");
-    nameField.setAccessible(true);
-    nameField.set(attribute, "NOT_CONFIGURABLE");
-    nameField.setAccessible(false);
-
-    ContextUtils.addAttribute(com.datatorrent.api.Context.DAGContext.class, attribute);
-    AttributeParseUtils.initialize();
-    ConfElement.initialize();
-
-    // attribute that cannot be configured
-
-    Properties props = new Properties();
-    props.put(StreamingApplication.DT_PREFIX + "attr.NOT_CONFIGURABLE", "value");
-
-    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
-    dagBuilder.addFromProperties(props, null);
-
-    try {
-      dagBuilder.prepareDAG(new LogicalPlan(), null, "");
-      Assert.fail("Exception expected");
-    } catch (Exception e) {
-      Assert.assertThat("Attribute not configurable", e.getMessage(), RegexMatcher.matches("Attribute does not support property configuration: NOT_CONFIGURABLE.*"));
-    }
-
-    ContextUtils.removeAttribute(com.datatorrent.api.Context.DAGContext.class, attribute);
-    AttributeParseUtils.initialize();
-    ConfElement.initialize();
-
-    // invalid attribute name
-    props = new Properties();
-    String invalidAttribute = StreamingApplication.DT_PREFIX + "attr.INVALID_NAME";
-    props.put(invalidAttribute, "value");
-
-    try {
-      new LogicalPlanConfiguration(new Configuration(false)).addFromProperties(props, null);
-      Assert.fail("Exception expected");
-    } catch (Exception e) {
-      LOG.debug("Exception message: {}", e.getMessage());
-      Assert.assertThat("Invalid attribute name", e.getMessage(), RegexMatcher.matches("Invalid attribute reference: " + invalidAttribute));
-    }
-  }
-
-  @Test
-  public void testAttributesCodec() {
-    Assert.assertNotSame(null, new Long[] {com.datatorrent.api.Context.DAGContext.serialVersionUID, OperatorContext.serialVersionUID, PortContext.serialVersionUID});
-    @SuppressWarnings("unchecked")
-    Set<Class<? extends Context>> contextClasses = Sets.newHashSet(com.datatorrent.api.Context.DAGContext.class, OperatorContext.class, PortContext.class);
-    for (Class<?> c : contextClasses) {
-      for (Attribute<Object> attr : AttributeInitializer.getAttributes(c)) {
-        Assert.assertNotNull(attr.name + " codec", attr.codec);
-      }
-    }
-  }
-
-  @Test
-  public void testTupleClassAttr() throws Exception
-  {
-    String resourcePath = "/schemaTestTopology.json";
-    InputStream is = this.getClass().getResourceAsStream(resourcePath);
-    if (is == null) {
-      fail("Could not load " + resourcePath);
-    }
-    StringWriter writer = new StringWriter();
-
-    IOUtils.copy(is, writer);
-    JSONObject json = new JSONObject(writer.toString());
-
-    Configuration conf = new Configuration(false);
-
-    LogicalPlanConfiguration planConf = new LogicalPlanConfiguration(conf);
-    LogicalPlan dag = planConf.createFromJson(json, "testLoadFromJson");
-    dag.validate();
-
-    OperatorMeta operator1 = dag.getOperatorMeta("operator1");
-    assertEquals("operator1.classname", SchemaTestOperator.class, operator1.getOperator().getClass());
-
-    StreamMeta input1 = dag.getStream("inputStream");
-    assertNotNull(input1);
-    for (LogicalPlan.InputPortMeta targetPort : input1.getSinks()) {
-      Assert.assertEquals("tuple class name required", TestSchema.class, targetPort.getAttributes().get(PortContext.TUPLE_CLASS));
-    }
-  }
-
-  @Test(expected = ValidationException.class)
-  public void testTupleClassAttrValidation() throws Exception
-  {
-    String resourcePath = "/schemaTestTopology.json";
-    InputStream is = this.getClass().getResourceAsStream(resourcePath);
-    if (is == null) {
-      fail("Could not load " + resourcePath);
-    }
-    StringWriter writer = new StringWriter();
-
-    IOUtils.copy(is, writer);
-    JSONObject json = new JSONObject(writer.toString());
-
-    //removing schema so that validation fails
-    json.getJSONArray("streams").getJSONObject(0).remove("schema");
-    Configuration conf = new Configuration(false);
-
-    LogicalPlanConfiguration planConf = new LogicalPlanConfiguration(conf);
-    LogicalPlan dag = planConf.createFromJson(json, "testLoadFromJson");
-
-    dag.validate();
-  }
-
-  @Test
-  public void testTestTupleClassAttrSetFromConfig()
-  {
-    Configuration conf = new Configuration(false);
-    conf.set(StreamingApplication.DT_PREFIX + "operator.o2.port.schemaRequiredPort.attr.TUPLE_CLASS",
-      "com.datatorrent.stram.plan.LogicalPlanConfigurationTest$TestSchema");
-
-    StreamingApplication streamingApplication = new StreamingApplication()
-    {
-      @Override
-      public void populateDAG(DAG dag, Configuration conf)
-      {
-        TestGeneratorInputOperator o1 = dag.addOperator("o1", new TestGeneratorInputOperator());
-        SchemaTestOperator o2 = dag.addOperator("o2", new SchemaTestOperator());
-        dag.addStream("stream", o1.outport, o2.schemaRequiredPort);
-      }
-    };
-    LogicalPlan dag = new LogicalPlan();
-    LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(conf);
-    lpc.prepareDAG(dag, streamingApplication, "app");
-    dag.validate();
-  }
-
-  /**
-   * This test and all of the following ambiguous attribute tests verify that when an ambiguous attribute
-   * name is provided, all the corresponding attributes are set.
-   * <br/><br/>
-   * <b>Note:</b> An attribute name is ambiguous when multiple attributes with the same
-   * simple name exist for multiple types of DAG elements (such as operators and ports).
-   * Examples of such attributes are com.datatorrent.api.Context.OperatorContext.AUTO_RECORD
-   * and com.datatorrent.api.Context.PortContext.AUTO_RECORD.
-   * <br/><br/>
-   * This test should set the attribute on the operators and ports.
-   */
-  @Test
-  public void testRootLevelAmbiguousAttributeSimple()
-  {
-    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD,
-                                       Context.PortContext.AUTO_RECORD,
-                                       StreamingApplication.DT_PREFIX,
-                                       null,
-                                       Boolean.TRUE,
-                                       true,
-                                       true);
-  }
-
-  /**
-   * This test should set the attribute on the operators and ports.
-   */
-  @Test
-  public void testApplicationLevelAmbiguousAttributeSimple()
-  {
-    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD,
-                                       Context.PortContext.AUTO_RECORD,
-                                       StreamingApplication.DT_PREFIX
-                                       + "application"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR
-                                       + "*"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                       null,
-                                       Boolean.TRUE,
-                                       true,
-                                       true);
-  }
-
-  /**
-   * This should only set the attribute on the operator
-   */
-  @Test
-  public void testOperatorLevelAmbiguousAttributeSimple()
-  {
-    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD,
-                                       Context.PortContext.AUTO_RECORD,
-                                       StreamingApplication.DT_PREFIX
-                                       + "operator"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR
-                                       + "*"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                       null,
-                                       Boolean.TRUE,
-                                       true,
-                                       false);
-  }
-
-  /**
-   * This should only set the attribute on the port
-   */
-  @Test
-  public void testPortLevelAmbiguousAttributeSimple()
-  {
-    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD,
-                                       Context.PortContext.AUTO_RECORD,
-                                       StreamingApplication.DT_PREFIX
-                                       + "port"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR
-                                       + "*"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                       null,
-                                       Boolean.TRUE,
-                                       false,
-                                       true);
-  }
-
-  /**
-   * This test should set the attribute on the operators and ports.
-   */
-  @Test
-  public void testRootLevelAmbiguousAttributeComplex()
-  {
-    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD,
-                                       Context.PortContext.AUTO_RECORD,
-                                       StreamingApplication.DT_PREFIX,
-                                       PortContext.class.getCanonicalName(),
-                                       Boolean.TRUE,
-                                       false,
-                                       true);
-  }
-
-  /**
-   * This test should set the attribute on the operators and ports.
-   */
-  @Test
-  public void testApplicationLevelAmbiguousAttributeComplex()
-  {
-    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD,
-                                       Context.PortContext.AUTO_RECORD,
-                                       StreamingApplication.DT_PREFIX
-                                       + "application"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR
-                                       + "*"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                       PortContext.class.getCanonicalName(),
-                                       Boolean.TRUE,
-                                       false,
-                                       true);
-  }
-
-  /**
-   * This should only set the attribute on the operator
-   */
-  @Test
-  public void testOperatorLevelAmbiguousAttributeComplex()
-  {
-    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD,
-                                       Context.PortContext.AUTO_RECORD,
-                                       StreamingApplication.DT_PREFIX
-                                       + "operator"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR
-                                       + "*"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                       OperatorContext.class.getCanonicalName(),
-                                       Boolean.TRUE,
-                                       true,
-                                       false);
-  }
-
-  /**
-   * This should only set the attribute on the port
-   */
-  @Test
-  public void testOperatorLevelAmbiguousAttributeComplex2()
-  {
-    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD,
-                                       Context.PortContext.AUTO_RECORD,
-                                       StreamingApplication.DT_PREFIX
-                                       + "operator"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR
-                                       + "*"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                       PortContext.class.getCanonicalName(),
-                                       Boolean.TRUE,
-                                       false,
-                                       true);
-  }
-
-  /**
-   * This should only set the attribute on the port
-   */
-  @Test
-  public void testPortLevelAmbiguousAttributeComplex()
-  {
-    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD,
-                                       Context.PortContext.AUTO_RECORD,
-                                       StreamingApplication.DT_PREFIX
-                                       + "port"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR
-                                       + "*"
-                                       + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                       PortContext.class.getCanonicalName(),
-                                       Boolean.TRUE,
-                                       false,
-                                       true);
-  }
-
-  private void testAttributeAmbiguousSimpleHelper(Attribute<?> attributeObjOperator,
-                                                  Attribute<?> attributeObjPort,
-                                                  String root,
-                                                  String contextClass,
-                                                  Object val,
-                                                  boolean operatorSet,
-                                                  boolean portSet)
-  {
-    Properties props = propertiesBuilder(attributeObjOperator.getSimpleName(),
-                                         root,
-                                         contextClass,
-                                         val);
-
-    simpleAttributeOperatorHelperAssert(attributeObjOperator,
-                                        props,
-                                        val,
-                                        operatorSet);
-
-    simpleNamePortAssertHelperAssert(attributeObjPort,
-                                     props,
-                                     val,
-                                     portSet);
-  }
-
-  @Test
-  public void testRootLevelAttributeSimpleNameOperator()
-  {
-    simpleAttributeOperatorHelper(OperatorContext.MEMORY_MB,
-                                  StreamingApplication.DT_PREFIX,
-                                  true,
-                                  (Integer)4096,
-                                  true,
-                                  true);
-  }
-
-  @Test
-  public void testRootLevelStorageAgentSimpleNameOperator()
-  {
-    MockStorageAgent mockAgent = new MockStorageAgent();
-
-    simpleAttributeOperatorHelper(OperatorContext.STORAGE_AGENT,
-                                  StreamingApplication.DT_PREFIX,
-                                  true,
-                                  mockAgent,
-                                  true,
-                                  false);
-  }
-
-  @Test
-  public void testRootLevelAttributeSimpleNameOperatorNoScope()
-  {
-    simpleAttributeOperatorHelper(OperatorContext.MEMORY_MB,
-                                  StreamingApplication.DT_PREFIX,
-                                  true,
-                                  (Integer)4096,
-                                  true,
-                                  false);
-  }
-
-  @Test
-  public void testApplicationLevelAttributeSimpleNameOperator()
-  {
-    simpleAttributeOperatorHelper(OperatorContext.MEMORY_MB,
-                                  StreamingApplication.DT_PREFIX
-                                  + "application"
-                                  + LogicalPlanConfiguration.KEY_SEPARATOR
-                                  + "SimpleTestApp"
-                                  + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                  true,
-                                  (Integer)4096,
-                                  true,
-                                  true);
-  }
-
-  private void simpleAttributeOperatorHelper(Attribute<?> attributeObj,
-                                             String root,
-                                             boolean simpleName,
-                                             Object val,
-                                             boolean set,
-                                             boolean scope)
-  {
-    Properties props = propertiesBuilderOperator(attributeObj.getSimpleName(),
-                                                 root,
-                                                 simpleName,
-                                                 val,
-                                                 scope);
-
-    simpleAttributeOperatorHelperAssert(attributeObj,
-                                        props,
-                                        val,
-                                        set);
-  }
-
-  private void simpleAttributeOperatorHelperAssert(Attribute<?> attributeObj,
-                                                   Properties props,
-                                                   Object val,
-                                                   boolean set)
-  {
-    SimpleTestApplicationWithName app = new SimpleTestApplicationWithName();
-
-    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
-    dagBuilder.addFromProperties(props, null);
-
-    String appPath = app.getClass().getName().replace(".", "/") + ".class";
-
-    LogicalPlan dag = new LogicalPlan();
-    dagBuilder.prepareDAG(dag, app, appPath);
-
-    OperatorMeta om1 = dag.getOperatorMeta("operator1");
-
-    if (set) {
-      Assert.assertEquals(val, om1.getValue(attributeObj));
-    } else {
-      Assert.assertNotEquals(val, om1.getValue(attributeObj));
-    }
-
-    OperatorMeta om2 = dag.getOperatorMeta("operator2");
-
-    if (set) {
-      Assert.assertEquals(val, om2.getValue(attributeObj));
-    } else {
-      Assert.assertNotEquals(val, om2.getValue(attributeObj));
-    }
-
-    OperatorMeta om3 = dag.getOperatorMeta("operator3");
-
-    if (set) {
-      Assert.assertEquals(val, om3.getValue(attributeObj));
-    } else {
-      Assert.assertNotEquals(val, om3.getValue(attributeObj));
-    }
-  }
-
-  /* Port tests */
-  @Test
-  public void testRootLevelAttributeSimpleNamePort()
-  {
-    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY,
-                              StreamingApplication.DT_PREFIX,
-                              true,
-                              (Integer)4096,
-                              true,
-                              true);
-  }
-
-  @Test
-  public void testRootLevelAttributeSimpleNamePortNoScope()
-  {
-    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY,
-                              StreamingApplication.DT_PREFIX,
-                              true,
-                              (Integer)4096,
-                              true,
-                              false);
-  }
-
-  @Test
-  public void testOperatorLevelAttributeSimpleNamePort()
-  {
-    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY,
-                              StreamingApplication.DT_PREFIX
-                              + "operator"
-                              + LogicalPlanConfiguration.KEY_SEPARATOR
-                              + "*"
-                              + LogicalPlanConfiguration.KEY_SEPARATOR,
-                              true,
-                              (Integer)4096,
-                              true,
-                              true);
-  }
-
-  @Test
-  public void testApplicationLevelAttributeSimpleNamePort()
-  {
-    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY,
-                              StreamingApplication.DT_PREFIX
-                              + "application"
-                              + LogicalPlanConfiguration.KEY_SEPARATOR
-                              + "SimpleTestApp"
-                              + LogicalPlanConfiguration.KEY_SEPARATOR,
-                              true,
-                              (Integer)4096,
-                              true,
-                              true);
-  }
-
-  @Test
-  public void testRootLevelAttributeComplexNamePort()
-  {
-    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY,
-                              StreamingApplication.DT_PREFIX,
-                              false,
-                              (Integer)4096,
-                              true,
-                              true);
-  }
-
-  @Test
-  public void testRootLevelAttributeComplexNamePortNoScope()
-  {
-    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY,
-                              StreamingApplication.DT_PREFIX,
-                              false,
-                              (Integer)4096,
-                              true,
-                              false);
-  }
-
-  @Test
-  public void testOperatorLevelAttributeComplexNamePort()
-  {
-    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY,
-                              StreamingApplication.DT_PREFIX
-                              + "operator"
-                              + LogicalPlanConfiguration.KEY_SEPARATOR
-                              + "*"
-                              + LogicalPlanConfiguration.KEY_SEPARATOR,
-                              false,
-                              (Integer)4096,
-                              true,
-                              true);
-  }
-
-  @Test
-  public void testApplicationLevelAttributeComplexNamePort()
-  {
-    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY,
-                              StreamingApplication.DT_PREFIX
-                              + "application"
-                              + LogicalPlanConfiguration.KEY_SEPARATOR
-                              + "SimpleTestApp"
-                              + LogicalPlanConfiguration.KEY_SEPARATOR,
-                              false,
-                              (Integer)4096,
-                              true,
-                              true);
-  }
-
-  /* Input port tests */
-  @Test
-  public void testRootLevelAttributeSimpleNameInputPort()
-  {
-    simpleAttributeInputPortHelper(PortContext.QUEUE_CAPACITY,
-                                   StreamingApplication.DT_PREFIX,
-                                   true,
-                                   (Integer)4096,
-                                   true);
-  }
-
-  @Test
-  public void testOperatorLevelAttributeSimpleNameInputPort()
-  {
-    simpleAttributeInputPortHelper(PortContext.QUEUE_CAPACITY,
-                                   StreamingApplication.DT_PREFIX
-                                   + "operator"
-                                   + LogicalPlanConfiguration.KEY_SEPARATOR
-                                   + "*"
-                                   + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                   true,
-                                   (Integer)4096,
-                                   true);
-  }
-
-  @Test
-  public void testApplicationLevelAttributeSimpleNameInputPort()
-  {
-    simpleAttributeInputPortHelper(PortContext.QUEUE_CAPACITY,
-                                   StreamingApplication.DT_PREFIX
-                                   + "application"
-                                   + LogicalPlanConfiguration.KEY_SEPARATOR
-                                   + "SimpleTestApp"
-                                   + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                   true,
-                                   (Integer)4096,
-                                   true);
-  }
-
-  @Test
-  public void testRootLevelAttributeComplexNameInputPort()
-  {
-    simpleAttributeInputPortHelper(PortContext.QUEUE_CAPACITY,
-                                   StreamingApplication.DT_PREFIX,
-                                   false,
-                                   (Integer)4096,
-                                   true);
-  }
-
-  @Test
-  public void testOperatorLevelAttributeComplexNameInputPort()
-  {
-    simpleAttributeInputPortHelper(PortContext.QUEUE_CAPACITY,
-                                   StreamingApplication.DT_PREFIX
-                                   + "operator"
-                                   + LogicalPlanConfiguration.KEY_SEPARATOR
-                                   + "*"
-                                   + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                   false,
-                                   (Integer)4096,
-                                   true);
-  }
-
-  @Test
-  public void testApplicationLevelAttributeComplexNameInputPort()
-  {
-    simpleAttributeInputPortHelper(PortContext.QUEUE_CAPACITY,
-                                   StreamingApplication.DT_PREFIX
-                                   + "application"
-                                   + LogicalPlanConfiguration.KEY_SEPARATOR
-                                   + "SimpleTestApp"
-                                   + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                   false,
-                                   (Integer)4096,
-                                   true);
-  }
-
-  /* Output port tests */
-  @Test
-  public void testRootLevelAttributeSimpleNameOutputPort()
-  {
-    simpleAttributeOutputPortHelper(PortContext.QUEUE_CAPACITY,
-                                    StreamingApplication.DT_PREFIX,
-                                    true,
-                                    (Integer)4096,
-                                    true);
-  }
-
-  @Test
-  public void testOperatorLevelAttributeSimpleNameOutputPort()
-  {
-    simpleAttributeOutputPortHelper(PortContext.QUEUE_CAPACITY,
-                                    StreamingApplication.DT_PREFIX
-                                    + "operator"
-                                    + LogicalPlanConfiguration.KEY_SEPARATOR
-                                    + "*"
-                                    + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                    true,
-                                    (Integer)4096,
-                                    true);
-  }
-
-  @Test
-  public void testApplicationLevelAttributeSimpleNameOutputPort()
-  {
-    simpleAttributeOutputPortHelper(PortContext.QUEUE_CAPACITY,
-                                    StreamingApplication.DT_PREFIX
-                                    + "application"
-                                    + LogicalPlanConfiguration.KEY_SEPARATOR
-                                    + "SimpleTestApp"
-                                    + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                    true,
-                                    (Integer)4096,
-                                    true);
-  }
-
-  @Test
-  public void testRootLevelAttributeComplexNameOutputPort()
-  {
-    simpleAttributeOutputPortHelper(PortContext.QUEUE_CAPACITY,
-                                    StreamingApplication.DT_PREFIX,
-                                    false,
-                                    (Integer)4096,
-                                    true);
-  }
-
-  @Test
-  public void testOperatorLevelAttributeComplexNameOutputPort()
-  {
-    simpleAttributeOutputPortHelper(PortContext.QUEUE_CAPACITY,
-                                    StreamingApplication.DT_PREFIX
-                                    + "operator"
-                                    + LogicalPlanConfiguration.KEY_SEPARATOR
-                                    + "*"
-                                    + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                    false,
-                                    (Integer)4096,
-                                    true);
-  }
-
-  @Test
-  public void testApplicationLevelAttributeComplexNameOutputPort()
-  {
-    simpleAttributeOutputPortHelper(PortContext.QUEUE_CAPACITY,
-                                    StreamingApplication.DT_PREFIX
-                                    + "application"
-                                    + LogicalPlanConfiguration.KEY_SEPARATOR
-                                    + "SimpleTestApp"
-                                    + LogicalPlanConfiguration.KEY_SEPARATOR,
-                                    false,
-                                    (Integer)4096,
-                                    true);
-  }
-
-  /* Helpers for building ports */
-  private void simpleAttributePortHelper(Attribute<?> attributeObj,
-                                         String root,
-                                         boolean simpleName,
-                                         Object val,
-                                         boolean set,
-                                         boolean scope)
-  {
-    Properties props = propertiesBuilderPort(attributeObj.getSimpleName(),
-                                             root,
-                                             simpleName,
-                                             val,
-                                             scope);
-
-    simpleNamePortAssertHelperAssert(attributeObj,
-                                     props,
-                                     val,
-                                     set);
-  }
-
-  private void simpleAttributeInputPortHelper(Attribute<?> attributeObj,
-                                              String root,
-                                              boolean simpleName,
-                                              Object val,
-                                              boolean set)
-  {
-    Properties props = propertiesBuilderInputPort(attributeObj.getSimpleName(),
-                                                  root,
-                                                  simpleName,
-                                                  val);
-
-    simpleNameInputPortAssertHelperAssert(attributeObj,
-                                          props,
-                                          val,
-                                          set);
-
-    simpleNameOutputPortAssertHelperAssert(attributeObj,
-                                           props,
-                                           val,
-                                           !set);
-  }
-
-  private void simpleAttributeOutputPortHelper(Attribute<?> attributeObj,
-                                               String root,
-                                               boolean simpleName,
-                                               Object val,
-                                               boolean set)
-  {
-    Properties props = propertiesBuilderOutputPort(attributeObj.getSimpleName(),
-                                                   root,
-                                                   simpleName,
-                                                   val);
-
-    simpleNameOutputPortAssertHelperAssert(attributeObj,
-                                           props,
-                                           val,
-                                           set);
-
-    simpleNameInputPortAssertHelperAssert(attributeObj,
-                                          props,
-                                          val,
-                                          !set);
-  }
-
-  private void simpleNamePortAssertHelperAssert(Attribute<?> attributeObj,
-                                                Properties props,
-                                                Object val,
-                                                boolean set)
-  {
-    SimpleTestApplicationWithName app = new SimpleTestApplicationWithName();
-
-    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
-    dagBuilder.addFromProperties(props, null);
-
-    String appPath = app.getClass().getName().replace(".", "/") + ".class";
-
-    LogicalPlan dag = new LogicalPlan();
-    dagBuilder.prepareDAG(dag, app, appPath);
-
-    simpleNamePortAssertHelper(attributeObj,
-                               dag,
-                               "operator1",
-                               val,
-                               set);
-
-    simpleNamePortAssertHelper(attributeObj,
-                               dag,
-                               "operator2",
-                               val,
-                               set);
-
-    simpleNamePortAssertHelper(attributeObj,
-                               dag,
-                               "operator3",
-                               val,
-                               set);
-  }
-
-  private void simpleNameInputPortAssertHelperAssert(Attribute<?> attributeObj,
-                                                     Properties props,
-                                                     Object val,
-                                                     boolean set)
-  {
-    SimpleTestApplicationWithName app = new SimpleTestApplicationWithName();
-
-    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
-    dagBuilder.addFromProperties(props, null);
-
-    String appPath = app.getClass().getName().replace(".", "/") + ".class";
-
-    LogicalPlan dag = new LogicalPlan();
-    dagBuilder.prepareDAG(dag, app, appPath);
-
-    simpleNameInputPortAssertHelper(attributeObj,
-                                    dag,
-                                    "operator1",
-                                    val,
-                                    set);
-
-    simpleNameInputPortAssertHelper(attributeObj,
-                                    dag,
-                                    "operator2",
-                                    val,
-                                    set);
-
-    simpleNameInputPortAssertHelper(attributeObj,
-                                    dag,
-                                    "operator3",
-                                    val,
-                                    set);
-  }
-
-  private void simpleNameOutputPortAssertHelperAssert(Attribute<?> attributeObj,
-                                                      Properties props,
-                                                      Object val,
-                                                      boolean set)
-  {
-    SimpleTestApplicationWithName app = new SimpleTestApplicationWithName();
-
-    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
-    dagBuilder.addFromProperties(props, null);
-
-    String appPath = app.getClass().getName().replace(".", "/") + ".class";
-
-    LogicalPlan dag = new LogicalPlan();
-    dagBuilder.prepareDAG(dag, app, appPath);
-
-    simpleNameOutputPortAssertHelper(attributeObj,
-                                     dag,
-                                     "operator1",
-                                     val,
-                                     set);
-
-    simpleNameOutputPortAssertHelper(attributeObj,
-                                     dag,
-                                     "operator2",
-                                     val,
-                                     set);
-
-    simpleNameOutputPortAssertHelper(attributeObj,
-                                     dag,
-                                     "operator3",
-                                     val,
-                                     set);
-  }
-
-  private void simpleNamePortAssertHelper(Attribute<?> attributeObj,
-                                          LogicalPlan dag,
-                                          String operatorName,
-                                          Object queueCapacity,
-                                          boolean set)
-  {
-    simpleNameInputPortAssertHelper(attributeObj,
-                                    dag,
-                                    operatorName,
-                                    queueCapacity,
-                                    set);
-
-    simpleNameOutputPortAssertHelper(attributeObj,
-                                     dag,
-                                     operatorName,
-                                     queueCapacity,
-                                     set);
-  }
-
-  private void simpleNameInputPortAssertHelper(Attribute<?> attributeObj,
-                                               LogicalPlan dag,
-                                               String operatorName,
-                                               Object queueCapacity,
-                                               boolean set)
-  {
-    OperatorMeta operatorMeta = dag.getOperatorMeta(operatorName);
-
-    for (InputPortMeta inputPortMeta: operatorMeta.getInputStreams().keySet()) {
-      if (set) {
-        Assert.assertEquals(queueCapacity, inputPortMeta.getValue(attributeObj));
-      } else {
-        Assert.assertNotEquals(queueCapacity, inputPortMeta.getValue(attributeObj));
-      }
-    }
-  }
-
-  private void simpleNameOutputPortAssertHelper(Attribute<?> attributeObj,
-                                                LogicalPlan dag,
-                                                String operatorName,
-                                                Object queueCapacity,
-                                                boolean set)
-  {
-    OperatorMeta operatorMeta = dag.getOperatorMeta(operatorName);
-
-    for (OutputPortMeta outputPortMeta: operatorMeta.getOutputStreams().keySet()) {
-      if (set) {
-        Assert.assertEquals(queueCapacity, outputPortMeta.getValue(attributeObj));
-      } else {
-        Assert.assertNotEquals(queueCapacity, outputPortMeta.getValue(attributeObj));
-      }
-    }
-  }
-
-  /* Helpers for building properties */
-  private Properties propertiesBuilder(String attributeName,
-                                       String root,
-                                       String contextClass,
-                                       Object val)
-  {
-    boolean simpleName = contextClass == null;
-
-    if (!simpleName) {
-      attributeName = contextClass
-                      + LogicalPlanConfiguration.KEY_SEPARATOR
-                      + attributeName;
-    }
-
-    Properties props = new Properties();
-
-    String propName = root
-                      + StramElement.ATTR.getValue()
-                      + LogicalPlanConfiguration.KEY_SEPARATOR
-                      + attributeName;
-
-    LOG.debug("adding prop {} with value {}", propName, val.toString());
-
-    props.put(propName,
-              val.toString());
-
-    return props;
-  }
-
-  private Properties propertiesBuilderOperator(String attributeName,
-                                               String root,
-                                               boolean simpleName,
-                                               Object val,
-                                               boolean addOperator)
-  {
-    String contextClass = simpleName ? null : OperatorContext.class.getCanonicalName();
-
-    if (addOperator) {
-      root += "operator"
-              + LogicalPlanConfiguration.KEY_SEPARATOR
-              + "*"
-              + LogicalPlanConfiguration.KEY_SEPARATOR;
-    }
-
-    return propertiesBuilder(attributeName,
-                             root,
-                             contextClass,
-                             val);
-  }
-
-  private Properties propertiesBuilderPort(String attributeName,
-                                           String root,
-                                           boolean simpleName,
-                                           Object val,
-                                           boolean addPort)
-  {
-    String contextClass = simpleName ? null : PortContext.class.getCanonicalName();
-
-    if (addPort) {
-      root += "port"
-              + LogicalPlanConfiguration.KEY_SEPARATOR
-              + "*"
-              + LogicalPlanConfiguration.KEY_SEPARATOR;
-    }
-
-    return propertiesBuilder(attributeName,
-                             root,
-                             contextClass,
-                             val);
-  }
-
-  private Properties propertiesBuilderInputPort(String attributeName,
-                                                String root,
-                                                boolean simpleName,
-                                                Object val)
-  {
-    String contextClass = simpleName ? null: PortContext.class.getCanonicalName();
-
-    root += "inputport" +
-            LogicalPlanConfiguration.KEY_SEPARATOR +
-            "*" +
-            LogicalPlanConfiguration.KEY_SEPARATOR;
-
-    return propertiesBuilder(attributeName,
-                             root,
-                             contextClass,
-                             val);
-  }
-
-  private Properties propertiesBuilderOutputPort(String attributeName,
-                                                 String root,
-                                                 boolean simpleName,
-                                                 Object val)
-  {
-    String contextClass = simpleName ? null: PortContext.class.getCanonicalName();
-
-    root += "outputport" +
-            LogicalPlanConfiguration.KEY_SEPARATOR +
-            "*" +
-            LogicalPlanConfiguration.KEY_SEPARATOR;
-
-    return propertiesBuilder(attributeName,
-                             root,
-                             contextClass,
-                             val);
-  }
-
-  private static final Logger logger = LoggerFactory.getLogger(LogicalPlanConfigurationTest.class);
-
-  public static class TestApplication implements StreamingApplication {
-    Integer testprop1;
-    Integer testprop2;
-    Integer testprop3;
-    TestInnerClass inncls;
-    public TestApplication() {
-      inncls=new TestInnerClass();
-    }
-
-    public Integer getTestprop1() {
-      return testprop1;
-    }
-
-    public void setTestprop1(Integer testprop1) {
-      this.testprop1 = testprop1;
-    }
-
-    public Integer getTestprop2() {
-      return testprop2;
-    }
-
-    public void setTestprop2(Integer testprop2) {
-      this.testprop2 = testprop2;
-    }
-
-    public Integer getTestprop3() {
-      return testprop3;
-    }
-
-    public void setTestprop3(Integer testprop3) {
-      this.testprop3 = testprop3;
-    }
-
-    public TestInnerClass getInncls() {
-      return inncls;
-    }
-
-    public void setInncls(TestInnerClass inncls) {
-      this.inncls = inncls;
-    }
-
-    @Override
-    public void populateDAG(DAG dag, Configuration conf) {
-
-    }
-    public class TestInnerClass{
-      Integer a;
-
-      public Integer getA() {
-        return a;
-      }
-
-      public void setA(Integer a) {
-        this.a = a;
-      }
-    }
-  }
-
-  public static class TestStatsListener implements StatsListener{
-
-    private int intProp;
-
-    public TestStatsListener()
-    {
-    }
-
-    @Override
-    public Response processStats(BatchedOperatorStats stats)
-    {
-      return null;
-    }
-
-    public int getIntProp()
-    {
-      return intProp;
-    }
-
-    public void setIntProp(int intProp)
-    {
-      this.intProp = intProp;
-    }
-
-    @Override
-    public int hashCode()
-    {
-      final int prime = 31;
-      int result = 1;
-      result = prime * result + intProp;
-      return result;
-    }
-
-    @Override
-    public boolean equals(Object obj)
-    {
-      if (this == obj)
-        return true;
-      if (obj == null)
-        return false;
-      if (getClass() != obj.getClass())
-        return false;
-      TestStatsListener other = (TestStatsListener) obj;
-      if (intProp != other.intProp)
-        return false;
-      return true;
-    }
-  }
-
-  public static class TestSchema
-  {
-  }
-
-  public static class SimpleTestApplication implements StreamingApplication
-  {
-    public final GenericTestOperator gt1 = new GenericTestOperator();
-    public final GenericTestOperator gt2 = new GenericTestOperator();
-    public final GenericTestOperator gt3 = new GenericTestOperator();
-
-    @Override
-    public void populateDAG(DAG dag, Configuration conf)
-    {
-      dag.addOperator("operator1", gt1);
-      dag.addOperator("operator2", gt2);
-      dag.addOperator("operator3", gt3);
-      dag.addStream("s1", gt1.outport1, gt2.inport1);
-      dag.addStream("s2", gt2.outport1, gt3.inport1, gt3.inport2);
-    }
-  };
-
-  @ApplicationAnnotation(name="SimpleTestApp")
-  public static class SimpleTestApplicationWithName extends SimpleTestApplication
-  {
-  };
-
-  private static final Logger LOG = LoggerFactory.getLogger(LogicalPlanConfigurationTest.class);
-}
-
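
The helpers removed above assemble attribute property names from a root prefix, an optional scope segment such as operator.*. or port.*., the ATTR element, and an optional fully qualified context class. A rough sketch of the resulting key layout follows; the "." separator, the "dt." prefix, and the "attr" literal are assumed values used only for illustration, and the context class name is hypothetical.

    // Sketch of the key layout built by the removed propertiesBuilder helpers,
    // assuming "." for KEY_SEPARATOR, "dt." for the prefix, and "attr" for
    // StramElement.ATTR (assumed values, not taken from the class constants).
    public class AttributeKeySketch
    {
      static String key(String root, String scope, String contextClass, String attributeName)
      {
        String name = (contextClass == null) ? attributeName : contextClass + "." + attributeName;
        return root + (scope == null ? "" : scope) + "attr." + name;
      }

      public static void main(String[] args)
      {
        // Simple attribute name at operator scope.
        System.out.println(key("dt.", "operator.*.", null, "MEMORY_MB"));
        // Fully qualified ("complex") attribute name at port scope; the context class here is hypothetical.
        System.out.println(key("dt.", "port.*.", "com.example.PortContext", "QUEUE_CAPACITY"));
      }
    }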


[11/50] [abbrv] incubator-apex-core git commit: APEX-111 #resolve show-logical-plan command should check whether the first arg is an app package before counting arguments

Posted by vr...@apache.org.
APEX-111 #resolve show-logical-plan command should check whether the first arg is an app package before counting arguments


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/6c242594
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/6c242594
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/6c242594

Branch: refs/heads/feature-module
Commit: 6c2425948a50a6c76a93e359356cbb387155ed22
Parents: 9d83a44
Author: David Yan <da...@datatorrent.com>
Authored: Thu Sep 10 14:03:34 2015 -0700
Committer: Chandni Singh <cs...@apache.org>
Committed: Thu Sep 10 15:00:23 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/cli/DTCli.java   | 143 +++++++++----------
 1 file changed, 68 insertions(+), 75 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/6c242594/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java b/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
index 53ad0ca..6ac1b9d 100644
--- a/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
+++ b/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
@@ -2777,95 +2777,88 @@ public class DTCli
         }
       }
 
-      if (commandLineInfo.args.length >= 2) {
-        String jarfile = expandFileName(commandLineInfo.args[0], true);
-        AppPackage ap = null;
+      if (commandLineInfo.args.length > 0) {
+        String filename = expandFileName(commandLineInfo.args[0], true);
+
         // see if the first argument is actually an app package
         try {
-          ap = new AppPackage(new File(jarfile));
-        }
-        catch (Exception ex) {
-          // fall through
-        }
-        if (ap != null) {
+          AppPackage ap = new AppPackage(new File(filename));
+          ap.close();
           new ShowLogicalPlanAppPackageCommand().execute(args, reader);
           return;
+        } catch (Exception ex) {
+          // fall through
         }
-        String appName = commandLineInfo.args[1];
-        StramAppLauncher submitApp = getStramAppLauncher(jarfile, config, commandLineInfo.ignorePom);
-        submitApp.loadDependencies();
-        List<AppFactory> matchingAppFactories = getMatchingAppFactories(submitApp, appName, commandLineInfo.exactMatch);
-        if (matchingAppFactories == null || matchingAppFactories.isEmpty()) {
-          throw new CliException("No application in jar file matches '" + appName + "'");
-        }
-        else if (matchingAppFactories.size() > 1) {
-          throw new CliException("More than one application in jar file match '" + appName + "'");
-        }
-        else {
-          Map<String, Object> map = new HashMap<String, Object>();
-          PrintStream originalStream = System.out;
-          AppFactory appFactory = matchingAppFactories.get(0);
-          try {
-            if (raw) {
-              PrintStream dummyStream = new PrintStream(new OutputStream()
-              {
-                @Override
-                public void write(int b)
+
+        if (commandLineInfo.args.length >= 2) {
+          String appName = commandLineInfo.args[1];
+          StramAppLauncher submitApp = getStramAppLauncher(filename, config, commandLineInfo.ignorePom);
+          submitApp.loadDependencies();
+          List<AppFactory> matchingAppFactories = getMatchingAppFactories(submitApp, appName, commandLineInfo.exactMatch);
+          if (matchingAppFactories == null || matchingAppFactories.isEmpty()) {
+            throw new CliException("No application in jar file matches '" + appName + "'");
+          } else if (matchingAppFactories.size() > 1) {
+            throw new CliException("More than one application in jar file match '" + appName + "'");
+          } else {
+            Map<String, Object> map = new HashMap<String, Object>();
+            PrintStream originalStream = System.out;
+            AppFactory appFactory = matchingAppFactories.get(0);
+            try {
+              if (raw) {
+                PrintStream dummyStream = new PrintStream(new OutputStream()
                 {
-                  // no-op
-                }
+                  @Override
+                  public void write(int b)
+                  {
+                    // no-op
+                  }
 
-              });
-              System.setOut(dummyStream);
+                });
+                System.setOut(dummyStream);
+              }
+              LogicalPlan logicalPlan = appFactory.createApp(submitApp.getLogicalPlanConfiguration());
+              map.put("applicationName", appFactory.getName());
+              map.put("logicalPlan", LogicalPlanSerializer.convertToMap(logicalPlan));
+            } finally {
+              if (raw) {
+                System.setOut(originalStream);
+              }
             }
+            printJson(map);
+          }
+        } else {
+          if (filename.endsWith(".json")) {
+            File file = new File(filename);
+            StramAppLauncher submitApp = new StramAppLauncher(file.getName(), config);
+            AppFactory appFactory = new StramAppLauncher.JsonFileAppFactory(file);
             LogicalPlan logicalPlan = appFactory.createApp(submitApp.getLogicalPlanConfiguration());
+            Map<String, Object> map = new HashMap<String, Object>();
             map.put("applicationName", appFactory.getName());
             map.put("logicalPlan", LogicalPlanSerializer.convertToMap(logicalPlan));
-          }
-          finally {
-            if (raw) {
-              System.setOut(originalStream);
+            printJson(map);
+          } else if (filename.endsWith(".properties")) {
+            File file = new File(filename);
+            StramAppLauncher submitApp = new StramAppLauncher(file.getName(), config);
+            AppFactory appFactory = new StramAppLauncher.PropertyFileAppFactory(file);
+            LogicalPlan logicalPlan = appFactory.createApp(submitApp.getLogicalPlanConfiguration());
+            Map<String, Object> map = new HashMap<String, Object>();
+            map.put("applicationName", appFactory.getName());
+            map.put("logicalPlan", LogicalPlanSerializer.convertToMap(logicalPlan));
+            printJson(map);
+          } else {
+            StramAppLauncher submitApp = getStramAppLauncher(filename, config, commandLineInfo.ignorePom);
+            submitApp.loadDependencies();
+            List<Map<String, Object>> appList = new ArrayList<Map<String, Object>>();
+            List<AppFactory> appFactoryList = submitApp.getBundledTopologies();
+            for (AppFactory appFactory : appFactoryList) {
+              Map<String, Object> m = new HashMap<String, Object>();
+              m.put("name", appFactory.getName());
+              appList.add(m);
             }
+            printJson(appList, "applications");
           }
-          printJson(map);
         }
-      }
-      else if (commandLineInfo.args.length == 1) {
-        String filename = expandFileName(commandLineInfo.args[0], true);
-        if (filename.endsWith(".json")) {
-          File file = new File(filename);
-          StramAppLauncher submitApp = new StramAppLauncher(file.getName(), config);
-          AppFactory appFactory = new StramAppLauncher.JsonFileAppFactory(file);
-          LogicalPlan logicalPlan = appFactory.createApp(submitApp.getLogicalPlanConfiguration());
-          Map<String, Object> map = new HashMap<String, Object>();
-          map.put("applicationName", appFactory.getName());
-          map.put("logicalPlan", LogicalPlanSerializer.convertToMap(logicalPlan));
-          printJson(map);
-        }
-        else if (filename.endsWith(".properties")) {
-          File file = new File(filename);
-          StramAppLauncher submitApp = new StramAppLauncher(file.getName(), config);
-          AppFactory appFactory = new StramAppLauncher.PropertyFileAppFactory(file);
-          LogicalPlan logicalPlan = appFactory.createApp(submitApp.getLogicalPlanConfiguration());
-          Map<String, Object> map = new HashMap<String, Object>();
-          map.put("applicationName", appFactory.getName());
-          map.put("logicalPlan", LogicalPlanSerializer.convertToMap(logicalPlan));
-          printJson(map);
-        }
-        else {
-          StramAppLauncher submitApp = getStramAppLauncher(filename, config, commandLineInfo.ignorePom);
-          submitApp.loadDependencies();
-          List<Map<String, Object>> appList = new ArrayList<Map<String, Object>>();
-          List<AppFactory> appFactoryList = submitApp.getBundledTopologies();
-          for (AppFactory appFactory : appFactoryList) {
-            Map<String, Object> m = new HashMap<String, Object>();
-            m.put("name", appFactory.getName());
-            appList.add(m);
-          }
-          printJson(appList, "applications");
-        }
-      }
-      else {
+      } else {
         if (currentApp == null) {
           throw new CliException("No application selected");
         }
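
The restructured block above decides how to treat the first argument by attempting to open it as an app package and falling back to the jar, .json, and .properties paths when that attempt fails. A small, self-contained sketch of that probe pattern follows, using JarFile plus a manifest check as a stand-in for the real AppPackage constructor; the helper name and the sample file name are hypothetical.

    import java.io.File;
    import java.io.IOException;
    import java.util.jar.JarFile;

    // Sketch of the "try to open, fall through on failure" probe used above.
    // JarFile stands in for AppPackage; the real constructor also validates MANIFEST.MF entries.
    public class AppPackageProbe
    {
      static boolean looksLikeAppPackage(File file)
      {
        try (JarFile jar = new JarFile(file)) {
          // Simplified criterion: the archive opens and carries a manifest.
          return jar.getManifest() != null;
        } catch (IOException ex) {
          return false;   // not an app package; let the caller try other interpretations
        }
      }

      public static void main(String[] args)
      {
        File f = new File(args.length > 0 ? args[0] : "myapp.apa");
        System.out.println(looksLikeAppPackage(f) ? "treat as app package" : "treat as plain jar / json / properties file");
      }
    }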


[18/50] [abbrv] incubator-apex-core git commit: APEX-121 #resolve

Posted by vr...@apache.org.
APEX-121 #resolve


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/89aefea4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/89aefea4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/89aefea4

Branch: refs/heads/feature-module
Commit: 89aefea4c0b5ac8b40efd2a191b81142a8c302f9
Parents: c349090
Author: Gaurav <ga...@datatorrent.com>
Authored: Sat Sep 12 22:50:11 2015 -0700
Committer: Gaurav <ga...@datatorrent.com>
Committed: Sat Sep 12 22:50:11 2015 -0700

----------------------------------------------------------------------
 .../com/datatorrent/stram/stream/BufferServerPublisher.java | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/89aefea4/engine/src/main/java/com/datatorrent/stram/stream/BufferServerPublisher.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/stream/BufferServerPublisher.java b/engine/src/main/java/com/datatorrent/stram/stream/BufferServerPublisher.java
index d8e09c8..39cf667 100644
--- a/engine/src/main/java/com/datatorrent/stram/stream/BufferServerPublisher.java
+++ b/engine/src/main/java/com/datatorrent/stram/stream/BufferServerPublisher.java
@@ -112,7 +112,14 @@ public class BufferServerPublisher extends Publisher implements ByteCounterStrea
          * if there is any state write that for the subscriber before we write the data.
          */
         if (dsp.state != null) {
-          write(DataTuple.getSerializedTuple(MessageType.CODEC_STATE_VALUE, dsp.state));
+          array = DataTuple.getSerializedTuple(MessageType.CODEC_STATE_VALUE, dsp.state);
+          try {
+            while (!write(array)) {
+              sleep(5);
+            }
+          } catch (InterruptedException ie) {
+            throw new RuntimeException(ie);
+          }
         }
         /*
          * Now that the state if any has been sent, we can proceed with the actual data we want to send.
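
The fix above keeps retrying the codec-state write until it succeeds, pausing briefly between attempts and rethrowing an interrupt as a RuntimeException. A minimal sketch of that retry pattern follows, with a queue-backed write(byte[]) standing in for the buffer-server publisher's non-blocking write.

    import java.util.concurrent.ArrayBlockingQueue;

    // Sketch only: write() models a non-blocking send that returns false while
    // the outgoing buffer is full, mirroring the publisher change above.
    public class RetryWriteSketch
    {
      private final ArrayBlockingQueue<byte[]> channel = new ArrayBlockingQueue<>(16);

      boolean write(byte[] data)
      {
        return channel.offer(data);   // false when the buffer is full
      }

      void writeBlocking(byte[] data)
      {
        try {
          while (!write(data)) {
            Thread.sleep(5);          // brief pause before retrying, as in the patch above
          }
        } catch (InterruptedException ie) {
          Thread.currentThread().interrupt();   // preserve the interrupt, then surface it
          throw new RuntimeException(ie);
        }
      }

      public static void main(String[] args)
      {
        RetryWriteSketch s = new RetryWriteSketch();
        s.writeBlocking(new byte[] {1, 2, 3});
        System.out.println("wrote one tuple, queue size = " + s.channel.size());
      }
    }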


[49/50] [abbrv] incubator-apex-core git commit: Merge branch 'APEX-42_UnifierAttr_final' of github.com:chaithu14/incubator-apex-core into chaithu-unifier-config

Posted by vr...@apache.org.
Merge branch 'APEX-42_UnifierAttr_final' of github.com:chaithu14/incubator-apex-core into chaithu-unifier-config


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/de1d0032
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/de1d0032
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/de1d0032

Branch: refs/heads/feature-module
Commit: de1d0032af89b03b5bd39442e6b692937a84857b
Parents: 282c43b 9b78c67
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Mon Sep 21 10:41:26 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Mon Sep 21 10:41:26 2015 -0700

----------------------------------------------------------------------
 .../plan/logical/LogicalPlanConfiguration.java  | 37 ++++++++++++++++---
 .../logical/LogicalPlanConfigurationTest.java   | 39 ++++++++++++++++++++
 2 files changed, 71 insertions(+), 5 deletions(-)
----------------------------------------------------------------------



[43/50] [abbrv] incubator-apex-core git commit: APEX-120 #comment using a different checkpoint location

Posted by vr...@apache.org.
APEX-120 #comment using a different checkpoint location


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/2c081b45
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/2c081b45
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/2c081b45

Branch: refs/heads/feature-module
Commit: 2c081b45ebf4c57b66075a993f53fa721aa45c93
Parents: 7503dde
Author: Gaurav <ga...@datatorrent.com>
Authored: Tue Sep 15 09:35:50 2015 -0700
Committer: Gaurav <ga...@datatorrent.com>
Committed: Thu Sep 17 16:40:32 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/common/util/AsyncFSStorageAgent.java   | 5 ++---
 .../com/datatorrent/common/util/AsyncFSStorageAgentTest.java    | 1 -
 2 files changed, 2 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/2c081b45/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java b/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
index 374917a..3965925 100644
--- a/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
+++ b/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
@@ -21,6 +21,7 @@
 package com.datatorrent.common.util;
 
 import java.io.*;
+import java.nio.file.Files;
 import java.util.EnumSet;
 
 import org.apache.hadoop.conf.Configuration;
@@ -49,9 +50,7 @@ public class AsyncFSStorageAgent extends FSStorageAgent
   {
     super(path, conf);
     try {
-      File tempFile = File.createTempFile("msp", "msp");
-      this.localBasePath = new File(tempFile.getParent(), "localcheckpoint").getAbsolutePath();
-      tempFile.delete();
+      this.localBasePath = Files.createTempDirectory("chkp").toString();
     } catch (IOException ex) {
       throw new RuntimeException(ex);
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/2c081b45/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java b/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java
index 892d221..a1504e4 100644
--- a/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java
+++ b/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java
@@ -63,7 +63,6 @@ public class AsyncFSStorageAgentTest
     {
       try {
         FileUtils.deleteDirectory(new File("target/" + description.getClassName()));
-        FileUtils.deleteDirectory(new File(FileUtils.getTempDirectory(), "localcheckpoint"));
       } catch (IOException e) {
         throw new RuntimeException(e);
       }
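
The change above replaces the createTempFile/delete workaround with Files.createTempDirectory, so every storage agent gets its own local checkpoint directory instead of sharing a fixed "localcheckpoint" path. A small sketch contrasting the two approaches; the prefix strings are illustrative.

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;

    public class TempDirSketch
    {
      // Old style: derive a fixed-name directory next to a throwaway temp file.
      static String sharedLocalDir() throws IOException
      {
        File tempFile = File.createTempFile("msp", "msp");
        String path = new File(tempFile.getParent(), "localcheckpoint").getAbsolutePath();
        tempFile.delete();
        return path;                       // same path for every agent on the machine
      }

      // New style: a unique directory per call, no collisions between agents.
      static String uniqueLocalDir() throws IOException
      {
        return Files.createTempDirectory("chkp").toString();
      }

      public static void main(String[] args) throws IOException
      {
        System.out.println(sharedLocalDir());
        System.out.println(uniqueLocalDir());
      }
    }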


[45/50] [abbrv] incubator-apex-core git commit: APEX-131 #resolve Added .travis.yml file to trigger the build as well as to test integrated Slack notifications.

Posted by vr...@apache.org.
APEX-131 #resolve Added .travis.yml file to trigger the build as well as to test integrated Slack notifications.


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/e482804a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/e482804a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/e482804a

Branch: refs/heads/feature-module
Commit: e482804a856f2a5c8e3591af14cf552093017fe4
Parents: d2f73e3
Author: Brennon York <bo...@apache.org>
Authored: Fri Sep 18 13:58:18 2015 -0700
Committer: Brennon York <bo...@apache.org>
Committed: Fri Sep 18 13:58:18 2015 -0700

----------------------------------------------------------------------
 .travis.yml | 20 ++++++++++++++++++++
 pom.xml     | 11 +++++++++++
 2 files changed, 31 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/e482804a/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..9b18190
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+language: java
+
+notifications:
+  slack:
+    secure: GlWD2EjgNC6Lg2DtAfZuKhg2RTHE0FMeyfHH24D7TKmV49gRVTzTTqvExwOdLLYDDIu966eOF5w90/PfiD11A4rpm6+WyjRHDkpOhTyjBKWc2btMqNwiP1hRf2uKEG6A+RgszaQJ4HkGiMxIpDJ3o/jaTpBseOeA399t8Z7Pkd6obXVAEMcnm2XtfUPzIBqGblVkiecS3OzbkzjKWaOG6+nlp+ajVO7MazsOR05JNZ1MXnDOK/Qq/7xLBtweF4r/O8okyg52fST4pGqk4JTTI++bVFoRySpjNWSSJbdrfkWG/7h0sfqMcwMj8TpZqojcCuTvZih+IyPZwX3MP6Ls4bF6OFcD3BvWg049WbstA6ZdKnVW9fOiJoJ+Bx9gNI0tKtYeDt/8nMBfSRVMmzRVEGdTOEEpFs7n79OVVytwbp8qYFU+waqlG1/tMQvCclSaxuY8d236Ybg410KLiMQ0YhQ8ZaTVagHu2l0KrfV16Xq3/CRsolHa8k7PxDzmCxdi07Ao/mrorQLmJWoc7FkxEw6ZrEGHP17HTn5uidoTkANgGak4AVgrJm6zLKdAERxvr1KMnfyuJLT1ZK1x73SV+3EpdzcMLxvq2dMaBpZcfARiYB04EwSHVBdm/D0AIZtRL2s1rG85y1OtCkDMd04ZEvbwdKYZ+fD+HjTn7Zo7AKM=

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/e482804a/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 87c34f1..fc0983f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -22,6 +22,17 @@
     </license>
   </licenses>
 
+  <repositories>
+    <repository>
+      <snapshots>
+        <enabled>false</enabled>
+      </snapshots>
+      <id>Datatorrent-Releases</id>
+      <name>DataTorrent Release Repository</name>
+      <url>https://www.datatorrent.com/maven/content/repositories/releases/</url>
+    </repository>
+  </repositories>
+
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <netbeans.hint.license>datatorrent-inc</netbeans.hint.license>


[09/50] [abbrv] incubator-apex-core git commit: APEX-101 #resolve store the allocated memory in megabytes instead of bytes

Posted by vr...@apache.org.
APEX-101 #resolve store the allocated memory in megabytes instead of bytes


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/b82f9052
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/b82f9052
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/b82f9052

Branch: refs/heads/feature-module
Commit: b82f90523f01019443220db7ceefa9f44c055e8e
Parents: b57972b
Author: David Yan <da...@datatorrent.com>
Authored: Wed Sep 9 18:18:29 2015 -0700
Committer: David Yan <da...@datatorrent.com>
Committed: Thu Sep 10 14:22:27 2015 -0700

----------------------------------------------------------------------
 .../com/datatorrent/stram/StreamingContainerManager.java     | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b82f9052/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
index 7944a4b..41738f4 100644
--- a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
+++ b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
@@ -174,7 +174,7 @@ public class StreamingContainerManager implements PlanContext
   private long completeEndWindowStatsWindowId;
   private final ConcurrentHashMap<String, MovingAverageLong> rpcLatencies = new ConcurrentHashMap<String, MovingAverageLong>();
   private final AtomicLong nodeToStramRequestIds = new AtomicLong(1);
-  private long allocatedMemoryBytes = 0;
+  private int allocatedMemoryMB = 0;
   private List<AppDataSource> appDataSources = null;
   private final Cache<Long, Object> commandResponse = CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.MINUTES).build();
   private long lastLatencyWarningTime;
@@ -395,7 +395,7 @@ public class StreamingContainerManager implements PlanContext
       }
       if (nmHttpPort != null) {
         String nodeHttpAddress = nmHost + ":" + nmHttpPort;
-        if (allocatedMemoryBytes == 0) {
+        if (allocatedMemoryMB == 0) {
           String url = ConfigUtils.getSchemePrefix(conf) + nodeHttpAddress + "/ws/v1/node/containers/" + ci.id;
           WebServicesClient webServicesClient = new WebServicesClient();
           try {
@@ -403,7 +403,7 @@ public class StreamingContainerManager implements PlanContext
             JSONObject json = new JSONObject(content);
             int totalMemoryNeededMB = json.getJSONObject("container").getInt("totalMemoryNeededMB");
             if (totalMemoryNeededMB > 0) {
-              allocatedMemoryBytes = totalMemoryNeededMB * 1024 * 1024;
+              allocatedMemoryMB = totalMemoryNeededMB;
             } else {
               LOG.warn("Could not determine the memory allocated for the streaming application master.  Node manager is reporting {} MB from {}", totalMemoryNeededMB, url);
             }
@@ -416,7 +416,7 @@ public class StreamingContainerManager implements PlanContext
         ci.rawContainerLogsUrl = ConfigUtils.getRawContainerLogsUrl(conf, nodeHttpAddress, plan.getLogicalPlan().getAttributes().get(LogicalPlan.APPLICATION_ID), ci.id);
       }
     }
-    ci.memoryMBAllocated = (int)(allocatedMemoryBytes / (1024 * 1024));
+    ci.memoryMBAllocated = allocatedMemoryMB;
     ci.memoryMBFree = ((int)(Runtime.getRuntime().freeMemory() / (1024 * 1024)));
     ci.lastHeartbeat = -1;
     ci.startedTime = startTime;
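
The patch above keeps the node manager's totalMemoryNeededMB value in megabytes end to end instead of converting it to bytes and back. A compact sketch of that reporting path follows, with the JSON and REST handling omitted; the field names are borrowed from the diff.

    // Sketch of keeping the allocation in MB end to end, as in the change above.
    public class MemoryReportSketch
    {
      private int allocatedMemoryMB = 0;   // was: long allocatedMemoryBytes

      void onNodeManagerReport(int totalMemoryNeededMB)
      {
        if (totalMemoryNeededMB > 0) {
          allocatedMemoryMB = totalMemoryNeededMB;   // no *1024*1024 and /1024/1024 round trip
        }
      }

      int memoryMBAllocated()
      {
        return allocatedMemoryMB;
      }

      public static void main(String[] args)
      {
        MemoryReportSketch s = new MemoryReportSketch();
        s.onNodeManagerReport(4096);
        System.out.println(s.memoryMBAllocated() + " MB");
      }
    }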


[04/50] [abbrv] incubator-apex-core git commit: Merge branch 'APEX-93' of git://github.com/ishark/incubator-apex-core into isha-apex-93

Posted by vr...@apache.org.
Merge branch 'APEX-93' of git://github.com/ishark/incubator-apex-core into isha-apex-93


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/8eb81f7c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/8eb81f7c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/8eb81f7c

Branch: refs/heads/feature-module
Commit: 8eb81f7c6bcb17d209542ab2221e6738353317dd
Parents: 09f716e 3178f13
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Wed Sep 9 16:40:11 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Wed Sep 9 16:40:11 2015 -0700

----------------------------------------------------------------------
 .../StreamCodecWrapperForPersistance.java       |  2 +-
 .../stram/plan/physical/PhysicalPlan.java       | 28 +++++++++++++++++++-
 .../stram/plan/StreamPersistanceTests.java      | 13 +++++++++
 3 files changed, 41 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8eb81f7c/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
----------------------------------------------------------------------


[32/50] [abbrv] incubator-apex-core git commit: APEX-125 #resolve #comment add long-lived option for app package

Posted by vr...@apache.org.
APEX-125 #resolve #comment add long-lived option for app package


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/cecdf4c7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/cecdf4c7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/cecdf4c7

Branch: refs/heads/feature-module
Commit: cecdf4c7351eb3605acc7864bb50b1724993181d
Parents: 2cd917d
Author: siyuan <si...@datatorrent.com>
Authored: Tue Sep 15 13:10:38 2015 -0700
Committer: siyuan <si...@datatorrent.com>
Committed: Tue Sep 15 13:10:38 2015 -0700

----------------------------------------------------------------------
 .../datatorrent/stram/client/AppPackage.java    | 47 ++++++++++++++++++--
 .../stram/client/AppPackageTest.java            | 13 ++++++
 2 files changed, 57 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/cecdf4c7/engine/src/main/java/com/datatorrent/stram/client/AppPackage.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/client/AppPackage.java b/engine/src/main/java/com/datatorrent/stram/client/AppPackage.java
index 5698807..f67860d 100644
--- a/engine/src/main/java/com/datatorrent/stram/client/AppPackage.java
+++ b/engine/src/main/java/com/datatorrent/stram/client/AppPackage.java
@@ -36,7 +36,7 @@ import org.slf4j.LoggerFactory;
  *
  * @since 1.0.3
  */
-public class AppPackage extends JarFile implements Closeable
+public class AppPackage extends JarFile
 {
   public static final String ATTRIBUTE_DT_ENGINE_VERSION = "DT-Engine-Version";
   public static final String ATTRIBUTE_DT_APP_PACKAGE_NAME = "DT-App-Package-Name";
@@ -63,6 +63,7 @@ public class AppPackage extends JarFile implements Closeable
   private final Set<String> configs = new TreeSet<String>();
 
   private final File resourcesDirectory;
+  private final boolean cleanOnClose;
 
   public static class AppInfo
   {
@@ -97,14 +98,27 @@ public class AppPackage extends JarFile implements Closeable
    * If app directory is to be processed, there may be resource leak in the class loader. Only pass true for short-lived
    * applications
    *
+   * If contentFolder is not null, it will try to create the contentFolder, file will be retained on disk after App Package is closed
+   * If contentFolder is null, temp folder will be created and will be cleaned on close()
+   *
    * @param file
+   * @param contentFolder  the folder that the app package will be extracted to
    * @param processAppDirectory
    * @throws java.io.IOException
    * @throws net.lingala.zip4j.exception.ZipException
    */
-  public AppPackage(File file, boolean processAppDirectory) throws IOException, ZipException
+  public AppPackage(File file, File contentFolder, boolean processAppDirectory) throws IOException, ZipException
   {
     super(file);
+
+    if (contentFolder != null) {
+      FileUtils.forceMkdir(contentFolder);
+      cleanOnClose = false;
+    } else {
+      cleanOnClose =  true;
+      contentFolder = new File("/tmp/dt-appPackage-" + Long.toString(System.nanoTime()));
+    }
+
     Manifest manifest = getManifest();
     if (manifest == null) {
       throw new IOException("Not a valid app package. MANIFEST.MF is not present.");
@@ -120,7 +134,7 @@ public class AppPackage extends JarFile implements Closeable
       throw new IOException("Not a valid app package.  Class-Path is missing from MANIFEST.MF");
     }
     classPath.addAll(Arrays.asList(StringUtils.split(classPathString, " ")));
-    directory = new File("/tmp/dt-appPackage-" + Long.toString(System.nanoTime()));
+    directory = contentFolder;
     extractToDirectory(directory, file);
     if (processAppDirectory) {
       processAppDirectory(new File(directory, "app"));
@@ -148,6 +162,25 @@ public class AppPackage extends JarFile implements Closeable
     }
   }
 
+  /**
+   * Creates an App Package object.
+   *
+   * If app directory is to be processed, there may be resource leak in the class loader. Only pass true for short-lived
+   * applications
+   *
+   * Files in app package will be extracted to tmp folder and will be cleaned on close()
+   * The close() method could be explicitly called or implicitly called by GC finalize()
+   *
+   * @param file
+   * @param processAppDirectory
+   * @throws java.io.IOException
+   * @throws net.lingala.zip4j.exception.ZipException
+   */
+  public AppPackage(File file, boolean processAppDirectory) throws IOException, ZipException
+  {
+    this(file, null, processAppDirectory);
+  }
+
   public static void extractToDirectory(File directory, File appPackageFile) throws ZipException
   {
     ZipFile zipFile = new ZipFile(appPackageFile);
@@ -177,7 +210,15 @@ public class AppPackage extends JarFile implements Closeable
   public void close() throws IOException
   {
     super.close();
+    if (cleanOnClose) {
+      cleanContent();
+    }
+  }
+
+  public void cleanContent() throws IOException
+  {
     FileUtils.deleteDirectory(directory);
+    LOG.debug("App Package {}-{} folder {} is removed", appPackageName, appPackageVersion, directory.getAbsolutePath());
   }
 
   public String getAppPackageName()
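
For illustration, here is a minimal usage sketch of the two extraction modes added above (the package file name, target folder, and class name are hypothetical, not part of the commit): a caller-supplied content folder is retained after close(), while omitting it extracts to a temp folder that is removed on close().

import java.io.File;

import com.datatorrent.stram.client.AppPackage;

public class AppPackageUsageSketch
{
  public static void main(String[] args) throws Exception
  {
    File pkg = new File("myapp.apa");               // hypothetical app package file
    File keep = new File("target/extracted-app");   // hypothetical folder supplied by the caller

    // Caller-supplied folder: extracted files stay on disk after close(); call cleanContent() to remove them.
    try (AppPackage retained = new AppPackage(pkg, keep, false)) {
      System.out.println(retained.getAppPackageName());
    }

    // No folder supplied: extraction goes to a temp directory that is deleted on close().
    try (AppPackage temporary = new AppPackage(pkg, true)) {
      System.out.println(temporary.getAppPackageName());
    }
  }
}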

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/cecdf4c7/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java b/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
index bf41270..6d70eeb 100644
--- a/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
@@ -18,6 +18,7 @@ package com.datatorrent.stram.client;
 import com.datatorrent.stram.support.StramTestSupport;
 import com.datatorrent.stram.util.JSONSerializationProvider;
 import net.lingala.zip4j.exception.ZipException;
+import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
@@ -37,6 +38,8 @@ import org.junit.BeforeClass;
 public class AppPackageTest
 {
   private static AppPackage ap;
+  // yet another app package, which retains the extracted files
+  private static AppPackage yap;
   private static JSONSerializationProvider jomp;
   private static JSONObject json;
 
@@ -49,6 +52,9 @@ public class AppPackageTest
       File file = StramTestSupport.createAppPackageFile();
       // Set up test instance
       ap = new AppPackage(file, true);
+      // set up another instance
+      File testfolder = new File("target/testapp");
+      yap = new AppPackage(file, testfolder, false);
       jomp = new JSONSerializationProvider();
       json = new JSONObject(jomp.getContext(null).writeValueAsString(ap));
 
@@ -58,6 +64,9 @@ public class AppPackageTest
       throw new RuntimeException(e);
     } catch (JSONException e) {
       throw new RuntimeException(e);
+    } finally {
+      IOUtils.closeQuietly(ap);
+      IOUtils.closeQuietly(yap);
     }
   }
 
@@ -82,6 +91,10 @@ public class AppPackageTest
     JSONObject dag = application.getJSONObject("dag");
     Assert.assertTrue("There is at least one stream", dag.getJSONArray("streams").length() >= 1);
    Assert.assertEquals("There are two operators", 2, dag.getJSONArray("operators").length());
+
+    Assert.assertTrue("app package extraction folder should be retained", new File("target/testapp").exists());
+    yap.cleanContent();
+    Assert.assertTrue("app package extraction folder should be removed", !new File("target/testapp").exists());
   }
 
   @Test


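As a usage note, the retain-then-clean flow exercised by the new assertions above looks roughly like this (a sketch only; the class name is illustrative and it assumes the same test-support helper is on the classpath):

import java.io.File;

import org.apache.commons.io.IOUtils;

import com.datatorrent.stram.client.AppPackage;
import com.datatorrent.stram.support.StramTestSupport;

public class AppPackageRetainSketch
{
  public static void main(String[] args) throws Exception
  {
    File apa = StramTestSupport.createAppPackageFile();   // same helper the test uses to build an .apa file
    File folder = new File("target/testapp");             // caller-supplied folder, retained after close()
    AppPackage yap = new AppPackage(apa, folder, false);
    IOUtils.closeQuietly(yap);                             // close() does not delete the caller-supplied folder
    // ... inspect the extracted files under target/testapp ...
    yap.cleanContent();                                    // explicit cleanup removes target/testapp
  }
}
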
[36/50] [abbrv] incubator-apex-core git commit: APEX-28 #resolve

Posted by vr...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/977093e1/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanConfigurationTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanConfigurationTest.java b/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanConfigurationTest.java
new file mode 100644
index 0000000..077e3a9
--- /dev/null
+++ b/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanConfigurationTest.java
@@ -0,0 +1,1511 @@
+/**
+ * Copyright (C) 2015 DataTorrent, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *         http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.datatorrent.stram.plan.logical;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.StringWriter;
+import java.lang.reflect.Field;
+
+import java.util.*;
+
+import javax.validation.ValidationException;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Assert;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.junit.Assert.*;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.mutable.MutableBoolean;
+import org.apache.hadoop.conf.Configuration;
+
+import com.datatorrent.api.*;
+import com.datatorrent.api.Attribute.AttributeMap;
+import com.datatorrent.api.Attribute.AttributeMap.AttributeInitializer;
+import com.datatorrent.api.Context.DAGContext;
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.Context.PortContext;
+import com.datatorrent.api.StringCodec.Integer2String;
+import com.datatorrent.api.annotation.ApplicationAnnotation;
+
+import com.datatorrent.common.codec.JsonStreamCodec;
+import com.datatorrent.common.util.BasicContainerOptConfigurator;
+import com.datatorrent.stram.PartitioningTest.PartitionLoadWatch;
+import com.datatorrent.stram.client.StramClientUtils;
+import com.datatorrent.stram.engine.GenericTestOperator;
+import com.datatorrent.stram.engine.TestGeneratorInputOperator;
+import com.datatorrent.stram.plan.SchemaTestOperator;
+import com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta;
+import com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta;
+import com.datatorrent.stram.plan.logical.LogicalPlan.OutputPortMeta;
+import com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta;
+import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration.AttributeParseUtils;
+import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration.ConfElement;
+import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration.ContextUtils;
+import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration.StramElement;
+import com.datatorrent.stram.plan.logical.LogicalPlanTest.ValidationTestOperator;
+import com.datatorrent.stram.support.StramTestSupport.RegexMatcher;
+
+public class LogicalPlanConfigurationTest {
+
+  static {
+    @SuppressWarnings("MismatchedReadAndWriteOfArray")
+    Object serial[] = new Object[] {MockContext1.serialVersionUID, MockContext2.serialVersionUID};
+  }
+
+  private static OperatorMeta assertNode(LogicalPlan dag, String id) {
+      OperatorMeta n = dag.getOperatorMeta(id);
+      assertNotNull("operator exists id=" + id, n);
+      return n;
+  }
+
+  /**
+   * Test read from dt-site.xml in Hadoop configuration format.
+   */
+  @Test
+  public void testLoadFromConfigXml() {
+    Configuration conf = new Configuration(false);
+    conf.addResource(StramClientUtils.DT_SITE_XML_FILE);
+
+    LogicalPlanConfiguration builder = new LogicalPlanConfiguration(conf);
+
+    LogicalPlan dag = new LogicalPlan();
+    builder.populateDAG(dag);
+    dag.validate();
+
+    assertEquals("number of operator confs", 6, dag.getAllOperators().size());
+
+    OperatorMeta operator1 = assertNode(dag, "operator1");
+    OperatorMeta operator2 = assertNode(dag, "operator2");
+    OperatorMeta operator3 = assertNode(dag, "operator3");
+    OperatorMeta operator4 = assertNode(dag, "operator4");
+
+    assertNotNull("operatorConf for root", operator1);
+    assertEquals("operatorId set", "operator1", operator1.getName());
+
+    // verify operator instantiation
+    assertEquals(operator1.getOperator().getClass(), TestGeneratorInputOperator.class);
+    TestGeneratorInputOperator GenericTestNode = (TestGeneratorInputOperator)operator1.getOperator();
+    assertEquals("myStringPropertyValue", GenericTestNode.getMyStringProperty());
+
+    // check links
+    assertEquals("operator1 inputs", 0, operator1.getInputStreams().size());
+    assertEquals("operator1 outputs", 1, operator1.getOutputStreams().size());
+    StreamMeta n1n2 = operator2.getInputStreams().get(operator2.getMeta(((GenericTestOperator)operator2.getOperator()).inport1));
+    assertNotNull("n1n2", n1n2);
+
+    // output/input stream object same
+    assertEquals("rootNode out is operator2 in", n1n2, operator1.getOutputStreams().get(operator1.getMeta(((TestGeneratorInputOperator)operator1.getOperator()).outport)));
+    assertEquals("n1n2 source", operator1, n1n2.getSource().getOperatorMeta());
+    Assert.assertEquals("n1n2 targets", 1, n1n2.getSinks().size());
+    Assert.assertEquals("n1n2 target", operator2, n1n2.getSinks().get(0).getOperatorWrapper());
+
+    assertEquals("stream name", "n1n2", n1n2.getName());
+    Assert.assertEquals("n1n2 not inline (default)", null, n1n2.getLocality());
+
+    // operator 2 streams to operator 3 and operator 4
+    assertEquals("operator 2 number of outputs", 1, operator2.getOutputStreams().size());
+    StreamMeta fromNode2 = operator2.getOutputStreams().values().iterator().next();
+
+    Set<OperatorMeta> targetNodes = Sets.newHashSet();
+    for (LogicalPlan.InputPortMeta ip : fromNode2.getSinks()) {
+      targetNodes.add(ip.getOperatorWrapper());
+    }
+    Assert.assertEquals("outputs " + fromNode2, Sets.newHashSet(operator3, operator4), targetNodes);
+
+    OperatorMeta operator6 = assertNode(dag, "operator6");
+
+    List<OperatorMeta> rootNodes = dag.getRootOperators();
+    assertEquals("number of root operators", 2, rootNodes.size());
+    assertTrue("root operator1", rootNodes.contains(operator1));
+    assertTrue("root operator6", rootNodes.contains(operator6));
+
+    for (OperatorMeta n : rootNodes) {
+      printTopology(n, dag, 0);
+    }
+
+  }
+
+  private void printTopology(OperatorMeta operator, DAG tplg, int level) {
+      String prefix = "";
+      if (level > 0) {
+        prefix = StringUtils.repeat(" ", 20*(level-1)) + "   |" + StringUtils.repeat("-", 17);
+      }
+      logger.debug(prefix  + operator.getName());
+      for (StreamMeta downStream : operator.getOutputStreams().values()) {
+          if (!downStream.getSinks().isEmpty()) {
+            for (LogicalPlan.InputPortMeta targetNode : downStream.getSinks()) {
+              printTopology(targetNode.getOperatorWrapper(), tplg, level+1);
+            }
+          }
+      }
+  }
+
+  @Test
+  public void testLoadFromPropertiesFile() throws IOException
+  {
+      Properties props = new Properties();
+      String resourcePath = "/testTopology.properties";
+      InputStream is = this.getClass().getResourceAsStream(resourcePath);
+      if (is == null) {
+        fail("Could not load " + resourcePath);
+      }
+      props.load(is);
+      LogicalPlanConfiguration pb = new LogicalPlanConfiguration(new Configuration(false))
+            .addFromProperties(props, null);
+
+      LogicalPlan dag = new LogicalPlan();
+      pb.populateDAG(dag);
+      dag.validate();
+
+      assertEquals("number of operator confs", 5, dag.getAllOperators().size());
+      assertEquals("number of root operators", 1, dag.getRootOperators().size());
+
+      StreamMeta s1 = dag.getStream("n1n2");
+      assertNotNull(s1);
+      assertTrue("n1n2 inline", DAG.Locality.CONTAINER_LOCAL == s1.getLocality());
+
+      OperatorMeta operator3 = dag.getOperatorMeta("operator3");
+      assertEquals("operator3.classname", GenericTestOperator.class, operator3.getOperator().getClass());
+
+      GenericTestOperator doperator3 = (GenericTestOperator)operator3.getOperator();
+      assertEquals("myStringProperty " + doperator3, "myStringPropertyValueFromTemplate", doperator3.getMyStringProperty());
+      assertFalse("booleanProperty " + doperator3, doperator3.booleanProperty);
+
+      OperatorMeta operator4 = dag.getOperatorMeta("operator4");
+      GenericTestOperator doperator4 = (GenericTestOperator)operator4.getOperator();
+      assertEquals("myStringProperty " + doperator4, "overrideOperator4", doperator4.getMyStringProperty());
+      assertEquals("setterOnlyOperator4 " + doperator4, "setterOnlyOperator4", doperator4.propertySetterOnly);
+      assertTrue("booleanProperty " + doperator4, doperator4.booleanProperty);
+
+      StreamMeta input1 = dag.getStream("inputStream");
+      assertNotNull(input1);
+      Assert.assertEquals("input1 source", dag.getOperatorMeta("inputOperator"), input1.getSource().getOperatorMeta());
+      Set<OperatorMeta> targetNodes = Sets.newHashSet();
+      for (LogicalPlan.InputPortMeta targetPort : input1.getSinks()) {
+        targetNodes.add(targetPort.getOperatorWrapper());
+      }
+
+      Assert.assertEquals("input1 target ", Sets.newHashSet(dag.getOperatorMeta("operator1"), operator3, operator4), targetNodes);
+
+  }
+
+  @Test
+  public void testLoadFromJson() throws Exception
+  {
+    String resourcePath = "/testTopology.json";
+    InputStream is = this.getClass().getResourceAsStream(resourcePath);
+    if (is == null) {
+      fail("Could not load " + resourcePath);
+    }
+    StringWriter writer = new StringWriter();
+
+    IOUtils.copy(is, writer);
+    JSONObject json = new JSONObject(writer.toString());
+
+    Configuration conf = new Configuration(false);
+    conf.set(StreamingApplication.DT_PREFIX + "operator.operator3.prop.myStringProperty", "o3StringFromConf");
+
+    LogicalPlanConfiguration planConf = new LogicalPlanConfiguration(conf);
+    LogicalPlan dag = planConf.createFromJson(json, "testLoadFromJson");
+    dag.validate();
+
+    assertEquals("DAG attribute CONTAINER_JVM_OPTIONS ", dag.getAttributes().get(DAGContext.CONTAINER_JVM_OPTIONS), "-Xmx16m");
+    Map<Class<?>, Class<? extends StringCodec<?>>> stringCodecsMap = Maps.newHashMap();
+    stringCodecsMap.put(Integer.class, Integer2String.class);
+    assertEquals("DAG attribute STRING_CODECS ", stringCodecsMap, dag.getAttributes().get(DAGContext.STRING_CODECS));
+    assertEquals("DAG attribute CONTAINER_OPTS_CONFIGURATOR ", BasicContainerOptConfigurator.class, dag.getAttributes().get(DAGContext.CONTAINER_OPTS_CONFIGURATOR).getClass());
+
+    assertEquals("number of operator confs", 5, dag.getAllOperators().size());
+    assertEquals("number of root operators", 1, dag.getRootOperators().size());
+
+    StreamMeta s1 = dag.getStream("n1n2");
+    assertNotNull(s1);
+    assertTrue("n1n2 inline", DAG.Locality.CONTAINER_LOCAL == s1.getLocality());
+
+    OperatorMeta input = dag.getOperatorMeta("inputOperator");
+    TestStatsListener tsl = new TestStatsListener();
+    tsl.setIntProp(222);
+    List<StatsListener> sll = Lists.<StatsListener>newArrayList(tsl);
+    assertEquals("inputOperator STATS_LISTENERS attribute ", sll, input.getAttributes().get(OperatorContext.STATS_LISTENERS));
+    for(OutputPortMeta opm : input.getOutputStreams().keySet()){
+      assertTrue("output port of input Operator attribute is JsonStreamCodec ", opm.getAttributes().get(PortContext.STREAM_CODEC) instanceof JsonStreamCodec<?>);
+    }
+
+    OperatorMeta operator3 = dag.getOperatorMeta("operator3");
+    assertEquals("operator3.classname", GenericTestOperator.class, operator3.getOperator().getClass());
+
+    GenericTestOperator doperator3 = (GenericTestOperator)operator3.getOperator();
+    assertEquals("myStringProperty " + doperator3, "o3StringFromConf", doperator3.getMyStringProperty());
+    assertFalse("booleanProperty " + doperator3, doperator3.booleanProperty);
+
+    OperatorMeta operator4 = dag.getOperatorMeta("operator4");
+    GenericTestOperator doperator4 = (GenericTestOperator)operator4.getOperator();
+    assertEquals("myStringProperty " + doperator4, "overrideOperator4", doperator4.getMyStringProperty());
+    assertEquals("setterOnlyOperator4 " + doperator4, "setterOnlyOperator4", doperator4.propertySetterOnly);
+    assertTrue("booleanProperty " + doperator4, doperator4.booleanProperty);
+
+    StreamMeta input1 = dag.getStream("inputStream");
+    assertNotNull(input1);
+    OperatorMeta inputOperator = dag.getOperatorMeta("inputOperator");
+    Assert.assertEquals("input1 source", inputOperator, input1.getSource().getOperatorMeta());
+    Set<OperatorMeta> targetNodes = Sets.newHashSet();
+    for (LogicalPlan.InputPortMeta targetPort : input1.getSinks()) {
+      targetNodes.add(targetPort.getOperatorWrapper());
+    }
+    Assert.assertEquals("operator attribute " + inputOperator, 64, (int)inputOperator.getValue(OperatorContext.MEMORY_MB));
+    Assert.assertEquals("port attribute " + inputOperator, 8, (int)input1.getSource().getValue(PortContext.UNIFIER_LIMIT));
+    Assert.assertEquals("input1 target ", Sets.newHashSet(dag.getOperatorMeta("operator1"), operator3, operator4), targetNodes);
+  }
+
+  @Test
+  @SuppressWarnings("UnnecessaryBoxing")
+  public void testAppLevelAttributes()
+  {
+    String appName = "app1";
+
+    Properties props = new Properties();
+    props.put(StreamingApplication.DT_PREFIX + DAG.MASTER_MEMORY_MB.getName(), "123");
+    props.put(StreamingApplication.DT_PREFIX + DAG.CONTAINER_JVM_OPTIONS.getName(), "-Dlog4j.properties=custom_log4j.properties");
+    props.put(StreamingApplication.DT_PREFIX + DAG.APPLICATION_PATH.getName(), "/defaultdir");
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + "." + DAG.APPLICATION_PATH.getName(), "/otherdir");
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + "." + DAG.STREAMING_WINDOW_SIZE_MILLIS.getName(), "1000");
+
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+    dagBuilder.addFromProperties(props, null);
+
+    LogicalPlan dag = new LogicalPlan();
+
+    dagBuilder.populateDAG(dag);
+
+    dagBuilder.setApplicationConfiguration(dag, appName, null);
+
+    Assert.assertEquals("", "/otherdir", dag.getValue(DAG.APPLICATION_PATH));
+    Assert.assertEquals("", Integer.valueOf(123), dag.getValue(DAG.MASTER_MEMORY_MB));
+    Assert.assertEquals("", Integer.valueOf(1000), dag.getValue(DAG.STREAMING_WINDOW_SIZE_MILLIS));
+    Assert.assertEquals("", "-Dlog4j.properties=custom_log4j.properties", dag.getValue(DAG.CONTAINER_JVM_OPTIONS));
+
+  }
+  @Test
+  @SuppressWarnings("UnnecessaryBoxing")
+  public void testAppLevelProperties() {
+    String appName = "app1";
+    Properties props = new Properties();
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".testprop1", "10");
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".prop.testprop2", "100");
+    props.put(StreamingApplication.DT_PREFIX + "application.*.prop.testprop3", "1000");
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".inncls.a", "10000");
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+    dagBuilder.addFromProperties(props, null);
+
+    LogicalPlan dag = new LogicalPlan();
+    TestApplication app1Test = new TestApplication();
+
+    dagBuilder.setApplicationConfiguration(dag, appName, app1Test);
+    Assert.assertEquals("", Integer.valueOf(10), app1Test.getTestprop1());
+    Assert.assertEquals("", Integer.valueOf(100), app1Test.getTestprop2());
+    Assert.assertEquals("", Integer.valueOf(1000), app1Test.getTestprop3());
+    Assert.assertEquals("", Integer.valueOf(10000), app1Test.getInncls().getA());
+  }
+
+  @Test
+  public void testPrepareDAG() {
+    final MutableBoolean appInitialized = new MutableBoolean(false);
+    StreamingApplication app = new StreamingApplication() {
+      @Override
+      public void populateDAG(DAG dag, Configuration conf)
+      {
+        Assert.assertEquals("", "hostname:9090", dag.getValue(DAG.GATEWAY_CONNECT_ADDRESS));
+        dag.setAttribute(DAG.GATEWAY_CONNECT_ADDRESS, "hostname:9091");
+        appInitialized.setValue(true);
+      }
+    };
+    Configuration conf = new Configuration(false);
+    conf.addResource(StramClientUtils.DT_SITE_XML_FILE);
+    LogicalPlanConfiguration pb = new LogicalPlanConfiguration(conf);
+
+    LogicalPlan dag = new LogicalPlan();
+    pb.prepareDAG(dag, app, "testconfig");
+
+    Assert.assertTrue("populateDAG called", appInitialized.booleanValue());
+    Assert.assertEquals("populateDAG overrides attribute", "hostname:9091", dag.getValue(DAG.GATEWAY_CONNECT_ADDRESS));
+  }
+
+  @Test
+  public void testOperatorConfigurationLookup() {
+
+    Properties props = new Properties();
+
+    // match operator by name
+    props.put(StreamingApplication.DT_PREFIX + "template.matchId1.matchIdRegExp", ".*operator1.*");
+    props.put(StreamingApplication.DT_PREFIX + "template.matchId1.stringProperty2", "stringProperty2Value-matchId1");
+    props.put(StreamingApplication.DT_PREFIX + "template.matchId1.nested.property", "nested.propertyValue-matchId1");
+
+    // match class name, lower priority
+    props.put(StreamingApplication.DT_PREFIX + "template.matchClass1.matchClassNameRegExp", ".*" + ValidationTestOperator.class.getSimpleName());
+    props.put(StreamingApplication.DT_PREFIX + "template.matchClass1.stringProperty2", "stringProperty2Value-matchClass1");
+
+    // match class name
+    props.put(StreamingApplication.DT_PREFIX + "template.t2.matchClassNameRegExp", ".*"+GenericTestOperator.class.getSimpleName());
+    props.put(StreamingApplication.DT_PREFIX + "template.t2.myStringProperty", "myStringPropertyValue");
+
+    // direct setting
+    props.put(StreamingApplication.DT_PREFIX + "operator.operator3.emitFormat", "emitFormatValue");
+
+    LogicalPlan dag = new LogicalPlan();
+    Operator operator1 = dag.addOperator("operator1", new ValidationTestOperator());
+    Operator operator2 = dag.addOperator("operator2", new ValidationTestOperator());
+    Operator operator3 = dag.addOperator("operator3", new GenericTestOperator());
+
+    LogicalPlanConfiguration pb = new LogicalPlanConfiguration(new Configuration(false));
+    LOG.debug("calling addFromProperties");
+    pb.addFromProperties(props, null);
+
+    Map<String, String> configProps = pb.getProperties(dag.getMeta(operator1), "appName");
+    Assert.assertEquals("" + configProps, 2, configProps.size());
+    Assert.assertEquals("" + configProps, "stringProperty2Value-matchId1", configProps.get("stringProperty2"));
+    Assert.assertEquals("" + configProps, "nested.propertyValue-matchId1", configProps.get("nested.property"));
+
+    configProps = pb.getProperties(dag.getMeta(operator2), "appName");
+    Assert.assertEquals("" + configProps, 1, configProps.size());
+    Assert.assertEquals("" + configProps, "stringProperty2Value-matchClass1", configProps.get("stringProperty2"));
+
+    configProps = pb.getProperties(dag.getMeta(operator3), "appName");
+    Assert.assertEquals("" + configProps, 2, configProps.size());
+    Assert.assertEquals("" + configProps, "myStringPropertyValue", configProps.get("myStringProperty"));
+    Assert.assertEquals("" + configProps, "emitFormatValue", configProps.get("emitFormat"));
+
+  }
+
+  @Test
+  public void testSetOperatorProperties() {
+
+    Configuration conf = new Configuration(false);
+    conf.set(StreamingApplication.DT_PREFIX + "operator.o1.prop.myStringProperty", "myStringPropertyValue");
+    conf.set(StreamingApplication.DT_PREFIX + "operator.o2.prop.stringArrayField", "a,b,c");
+    conf.set(StreamingApplication.DT_PREFIX + "operator.o2.prop.mapProperty.key1", "key1Val");
+    conf.set(StreamingApplication.DT_PREFIX + "operator.o2.prop.mapProperty(key1.dot)", "key1dotVal");
+    conf.set(StreamingApplication.DT_PREFIX + "operator.o2.prop.mapProperty(key2.dot)", "key2dotVal");
+
+    LogicalPlan dag = new LogicalPlan();
+    GenericTestOperator o1 = dag.addOperator("o1", new GenericTestOperator());
+    ValidationTestOperator o2 = dag.addOperator("o2", new ValidationTestOperator());
+
+    LogicalPlanConfiguration pb = new LogicalPlanConfiguration(conf);
+
+    pb.setOperatorProperties(dag, "testSetOperatorProperties");
+    Assert.assertEquals("o1.myStringProperty", "myStringPropertyValue", o1.getMyStringProperty());
+    Assert.assertArrayEquals("o2.stringArrayField", new String[] {"a", "b", "c"}, o2.getStringArrayField());
+
+    Assert.assertEquals("o2.mapProperty.key1", "key1Val", o2.getMapProperty().get("key1"));
+    Assert.assertEquals("o2.mapProperty(key1.dot)", "key1dotVal", o2.getMapProperty().get("key1.dot"));
+    Assert.assertEquals("o2.mapProperty(key2.dot)", "key2dotVal", o2.getMapProperty().get("key2.dot"));
+
+  }
+
+  @ApplicationAnnotation(name="AnnotatedAlias")
+  class AnnotatedApplication implements StreamingApplication {
+
+    @Override
+    public void populateDAG(DAG dag, Configuration conf)
+    {
+    }
+
+  }
+
+  @Test
+  public void testAppNameAttribute() {
+    StreamingApplication app = new AnnotatedApplication();
+    Configuration conf = new Configuration(false);
+    conf.addResource(StramClientUtils.DT_SITE_XML_FILE);
+
+    LogicalPlanConfiguration builder = new LogicalPlanConfiguration(conf);
+
+    Properties properties = new Properties();
+    properties.put(StreamingApplication.DT_PREFIX + "application.TestAliasApp.class", app.getClass().getName());
+
+    builder.addFromProperties(properties, null);
+
+    LogicalPlan dag = new LogicalPlan();
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+    dag.setAttribute(com.datatorrent.api.Context.DAGContext.APPLICATION_NAME, "testApp");
+    builder.prepareDAG(dag, app, appPath);
+
+    Assert.assertEquals("Application name", "testApp", dag.getAttributes().get(com.datatorrent.api.Context.DAGContext.APPLICATION_NAME));
+  }
+
+  @Test
+  public void testAppAlias() {
+    StreamingApplication app = new AnnotatedApplication();
+    Configuration conf = new Configuration(false);
+    conf.addResource(StramClientUtils.DT_SITE_XML_FILE);
+
+    LogicalPlanConfiguration builder = new LogicalPlanConfiguration(conf);
+
+    Properties properties = new Properties();
+    properties.put(StreamingApplication.DT_PREFIX + "application.TestAliasApp.class", app.getClass().getName());
+
+    builder.addFromProperties(properties, null);
+
+    LogicalPlan dag = new LogicalPlan();
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+    builder.prepareDAG(dag, app, appPath);
+
+    Assert.assertEquals("Application name", "TestAliasApp", dag.getAttributes().get(com.datatorrent.api.Context.DAGContext.APPLICATION_NAME));
+  }
+
+
+  @Test
+  public void testAppAnnotationAlias() {
+    StreamingApplication app = new AnnotatedApplication();
+    Configuration conf = new Configuration(false);
+    conf.addResource(StramClientUtils.DT_SITE_XML_FILE);
+
+    LogicalPlanConfiguration builder = new LogicalPlanConfiguration(conf);
+
+    LogicalPlan dag = new LogicalPlan();
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+    builder.prepareDAG(dag, app, appPath);
+
+    Assert.assertEquals("Application name", "AnnotatedAlias", dag.getAttributes().get(com.datatorrent.api.Context.DAGContext.APPLICATION_NAME));
+  }
+
+  @Test
+  @SuppressWarnings( {"UnnecessaryBoxing", "AssertEqualsBetweenInconvertibleTypes"})
+  public void testOperatorLevelAttributes() {
+    String appName = "app1";
+    StreamingApplication app = new StreamingApplication() {
+      @Override
+      public void populateDAG(DAG dag, Configuration conf)
+      {
+        dag.addOperator("operator1", GenericTestOperator.class);
+        dag.addOperator("operator2", GenericTestOperator.class);
+      }
+    };
+
+    Properties props = new Properties();
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".class", app.getClass().getName());
+    props.put(StreamingApplication.DT_PREFIX + "operator.*." + OperatorContext.APPLICATION_WINDOW_COUNT.getName(), "2");
+    props.put(StreamingApplication.DT_PREFIX + "operator.*." + OperatorContext.STATS_LISTENERS.getName(), PartitionLoadWatch.class.getName());
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator1." + OperatorContext.APPLICATION_WINDOW_COUNT.getName(), "20");
+
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+    dagBuilder.addFromProperties(props, null);
+
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+
+    LogicalPlan dag = new LogicalPlan();
+    dagBuilder.prepareDAG(dag, app, appPath);
+
+    Assert.assertEquals("", Integer.valueOf(20), dag.getOperatorMeta("operator1").getValue(OperatorContext.APPLICATION_WINDOW_COUNT));
+    Assert.assertEquals("", Integer.valueOf(2), dag.getOperatorMeta("operator2").getValue(OperatorContext.APPLICATION_WINDOW_COUNT));
+    Assert.assertEquals("", PartitionLoadWatch.class, dag.getOperatorMeta("operator2").getValue(OperatorContext.STATS_LISTENERS).toArray()[0].getClass());
+  }
+
+  @Test
+  public void testOperatorLevelProperties() {
+    String appName = "app1";
+    final GenericTestOperator operator1 = new GenericTestOperator();
+    final GenericTestOperator operator2 = new GenericTestOperator();
+    StreamingApplication app = new StreamingApplication() {
+      @Override
+      public void populateDAG(DAG dag, Configuration conf)
+      {
+        dag.addOperator("operator1", operator1);
+        dag.addOperator("operator2", operator2);
+      }
+    };
+
+    Properties props = new Properties();
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".class", app.getClass().getName());
+    props.put(StreamingApplication.DT_PREFIX + "operator.*.myStringProperty", "pv1");
+    props.put(StreamingApplication.DT_PREFIX + "operator.*.booleanProperty", Boolean.TRUE.toString());
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator1.myStringProperty", "apv1");
+
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+    dagBuilder.addFromProperties(props, null);
+
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+
+    LogicalPlan dag = new LogicalPlan();
+    dagBuilder.prepareDAG(dag, app, appPath);
+
+    Assert.assertEquals("apv1", operator1.getMyStringProperty());
+    Assert.assertEquals("pv1", operator2.getMyStringProperty());
+    Assert.assertEquals(true, operator2.isBooleanProperty());
+  }
+
+  @Test
+  public void testApplicationLevelParameter()
+  {
+    String appName = "app1";
+    final GenericTestOperator operator1 = new GenericTestOperator();
+    final GenericTestOperator operator2 = new GenericTestOperator();
+    StreamingApplication app = new StreamingApplication()
+    {
+      @Override
+      public void populateDAG(DAG dag, Configuration conf)
+      {
+        dag.addOperator("operator1", operator1);
+        dag.addOperator("operator2", operator2);
+      }
+    };
+
+    Properties props = new Properties();
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".class", app.getClass().getName());
+    props.put(StreamingApplication.DT_PREFIX + "operator.*.myStringProperty", "foo ${xyz} bar ${zzz} baz");
+    props.put(StreamingApplication.DT_PREFIX + "operator.*.booleanProperty", Boolean.TRUE.toString());
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator1.myStringProperty", "apv1");
+
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+
+    Configuration vars = new Configuration(false);
+    vars.set("xyz", "123");
+    vars.set("zzz", "456");
+    dagBuilder.addFromProperties(props, vars);
+
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+
+    LogicalPlan dag = new LogicalPlan();
+    dagBuilder.prepareDAG(dag, app, appPath);
+
+    Assert.assertEquals("apv1", operator1.getMyStringProperty());
+    Assert.assertEquals("foo 123 bar 456 baz", operator2.getMyStringProperty());
+    Assert.assertEquals(true, operator2.isBooleanProperty());
+  }
+
+  @Test
+  @SuppressWarnings("UnnecessaryBoxing")
+  public void testPortLevelAttributes() {
+    String appName = "app1";
+    SimpleTestApplication app = new SimpleTestApplication();
+
+    Properties props = new Properties();
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".class", app.getClass().getName());
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator1.port.*." + PortContext.QUEUE_CAPACITY.getName(), "" + 16 * 1024);
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator2.inputport.inport1." + PortContext.QUEUE_CAPACITY.getName(), "" + 32 * 1024);
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator2.outputport.outport1." + PortContext.QUEUE_CAPACITY.getName(), "" + 32 * 1024);
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator3.port.*." + PortContext.QUEUE_CAPACITY.getName(), "" + 16 * 1024);
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator3.inputport.inport2." + PortContext.QUEUE_CAPACITY.getName(), "" + 32 * 1024);
+
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+    dagBuilder.addFromProperties(props, null);
+
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+
+    LogicalPlan dag = new LogicalPlan();
+    dagBuilder.prepareDAG(dag, app, appPath);
+
+    OperatorMeta om1 = dag.getOperatorMeta("operator1");
+    Assert.assertEquals("", Integer.valueOf(16 * 1024), om1.getMeta(app.gt1.outport1).getValue(PortContext.QUEUE_CAPACITY));
+    OperatorMeta om2 = dag.getOperatorMeta("operator2");
+    Assert.assertEquals("", Integer.valueOf(32 * 1024), om2.getMeta(app.gt2.inport1).getValue(PortContext.QUEUE_CAPACITY));
+    Assert.assertEquals("", Integer.valueOf(32 * 1024), om2.getMeta(app.gt2.outport1).getValue(PortContext.QUEUE_CAPACITY));
+    OperatorMeta om3 = dag.getOperatorMeta("operator3");
+    Assert.assertEquals("", Integer.valueOf(16 * 1024), om3.getMeta(app.gt3.inport1).getValue(PortContext.QUEUE_CAPACITY));
+    Assert.assertEquals("", Integer.valueOf(32 * 1024), om3.getMeta(app.gt3.inport2).getValue(PortContext.QUEUE_CAPACITY));
+  }
+
+
+  @Test
+  public void testInvalidAttribute() throws Exception {
+    Assert.assertNotSame(0, com.datatorrent.api.Context.DAGContext.serialVersionUID);
+    Attribute<String> attribute = new Attribute<>("", null);
+
+    Field nameField = Attribute.class.getDeclaredField("name");
+    nameField.setAccessible(true);
+    nameField.set(attribute, "NOT_CONFIGURABLE");
+    nameField.setAccessible(false);
+
+    ContextUtils.addAttribute(com.datatorrent.api.Context.DAGContext.class, attribute);
+    AttributeParseUtils.initialize();
+    ConfElement.initialize();
+
+    // attribute that cannot be configured
+
+    Properties props = new Properties();
+    props.put(StreamingApplication.DT_PREFIX + "attr.NOT_CONFIGURABLE", "value");
+
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+    dagBuilder.addFromProperties(props, null);
+
+    try {
+      dagBuilder.prepareDAG(new LogicalPlan(), null, "");
+      Assert.fail("Exception expected");
+    } catch (Exception e) {
+      Assert.assertThat("Attribute not configurable", e.getMessage(), RegexMatcher.matches("Attribute does not support property configuration: NOT_CONFIGURABLE.*"));
+    }
+
+    ContextUtils.removeAttribute(com.datatorrent.api.Context.DAGContext.class, attribute);
+    AttributeParseUtils.initialize();
+    ConfElement.initialize();
+
+    // invalid attribute name
+    props = new Properties();
+    String invalidAttribute = StreamingApplication.DT_PREFIX + "attr.INVALID_NAME";
+    props.put(invalidAttribute, "value");
+
+    try {
+      new LogicalPlanConfiguration(new Configuration(false)).addFromProperties(props, null);
+      Assert.fail("Exception expected");
+    } catch (Exception e) {
+      LOG.debug("Exception message: {}", e);
+      Assert.assertThat("Invalid attribute name", e.getMessage(), RegexMatcher.matches("Invalid attribute reference: " + invalidAttribute));
+    }
+  }
+
+  @Test
+  public void testAttributesCodec() {
+    Assert.assertNotSame(null, new Long[] {com.datatorrent.api.Context.DAGContext.serialVersionUID, OperatorContext.serialVersionUID, PortContext.serialVersionUID});
+    @SuppressWarnings("unchecked")
+    Set<Class<? extends Context>> contextClasses = Sets.newHashSet(com.datatorrent.api.Context.DAGContext.class, OperatorContext.class, PortContext.class);
+    for (Class<?> c : contextClasses) {
+      for (Attribute<Object> attr : AttributeInitializer.getAttributes(c)) {
+        Assert.assertNotNull(attr.name + " codec", attr.codec);
+      }
+    }
+  }
+
+  @Test
+  public void testTupleClassAttr() throws Exception
+  {
+    String resourcePath = "/schemaTestTopology.json";
+    InputStream is = this.getClass().getResourceAsStream(resourcePath);
+    if (is == null) {
+      fail("Could not load " + resourcePath);
+    }
+    StringWriter writer = new StringWriter();
+
+    IOUtils.copy(is, writer);
+    JSONObject json = new JSONObject(writer.toString());
+
+    Configuration conf = new Configuration(false);
+
+    LogicalPlanConfiguration planConf = new LogicalPlanConfiguration(conf);
+    LogicalPlan dag = planConf.createFromJson(json, "testLoadFromJson");
+    dag.validate();
+
+    OperatorMeta operator1 = dag.getOperatorMeta("operator1");
+    assertEquals("operator1.classname", SchemaTestOperator.class, operator1.getOperator().getClass());
+
+    StreamMeta input1 = dag.getStream("inputStream");
+    assertNotNull(input1);
+    for (LogicalPlan.InputPortMeta targetPort : input1.getSinks()) {
+      Assert.assertEquals("tuple class name required", TestSchema.class, targetPort.getAttributes().get(PortContext.TUPLE_CLASS));
+    }
+  }
+
+  @Test(expected = ValidationException.class)
+  public void testTupleClassAttrValidation() throws Exception
+  {
+    String resourcePath = "/schemaTestTopology.json";
+    InputStream is = this.getClass().getResourceAsStream(resourcePath);
+    if (is == null) {
+      fail("Could not load " + resourcePath);
+    }
+    StringWriter writer = new StringWriter();
+
+    IOUtils.copy(is, writer);
+    JSONObject json = new JSONObject(writer.toString());
+
+    //removing schema so that validation fails
+    json.getJSONArray("streams").getJSONObject(0).remove("schema");
+    Configuration conf = new Configuration(false);
+
+    LogicalPlanConfiguration planConf = new LogicalPlanConfiguration(conf);
+    LogicalPlan dag = planConf.createFromJson(json, "testLoadFromJson");
+
+    dag.validate();
+  }
+
+  @Test
+  public void testTestTupleClassAttrSetFromConfig()
+  {
+    Configuration conf = new Configuration(false);
+    conf.set(StreamingApplication.DT_PREFIX + "operator.o2.port.schemaRequiredPort.attr.TUPLE_CLASS",
+      "com.datatorrent.stram.plan.logical.LogicalPlanConfigurationTest$TestSchema");
+
+    StreamingApplication streamingApplication = new StreamingApplication()
+    {
+      @Override
+      public void populateDAG(DAG dag, Configuration conf)
+      {
+        TestGeneratorInputOperator o1 = dag.addOperator("o1", new TestGeneratorInputOperator());
+        SchemaTestOperator o2 = dag.addOperator("o2", new SchemaTestOperator());
+        dag.addStream("stream", o1.outport, o2.schemaRequiredPort);
+      }
+    };
+    LogicalPlan dag = new LogicalPlan();
+    LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(conf);
+    lpc.prepareDAG(dag, streamingApplication, "app");
+    dag.validate();
+  }
+
+  /*
+   * This test and all of the following ambiguous attribute tests verify that when an ambiguous attribute
+   * name is provided, all the corresponding attributes are set.
+   * <br/><br/>
+   * <b>Note:</b> An attribute is ambiguous when multiple attributes with the same
+   * simple name exist for multiple types of DAG elements (such as operators and ports).
+   * Examples of such attributes are com.datatorrent.api.Context.OperatorContext.AUTO_RECORD
+   * and com.datatorrent.api.Context.PortContext.AUTO_RECORD.
+   * <br/><br/>
+   * This test should set the attribute on the operators and ports.
+   */
+  /**
+   * This test checks that an ambiguous DAG attribute does not get set on operators.
+   */
+  @Test
+  public void testDagAttributeNotSetOnOperator()
+  {
+    dagOperatorAttributeHelper(true);
+  }
+
+  @Test
+  public void testAmbiguousAttributeSetOnOperatorAndNotDAG()
+  {
+    dagOperatorAttributeHelper(false);
+  }
+
+  private void dagOperatorAttributeHelper(boolean attrOnDag)
+  {
+    String attributeName = null;
+
+    if (attrOnDag) {
+      attributeName = DAGContext.CHECKPOINT_WINDOW_COUNT.getSimpleName();
+    }
+    else {
+      attributeName = OperatorContext.class.getCanonicalName() + LogicalPlanConfiguration.KEY_SEPARATOR + DAGContext.CHECKPOINT_WINDOW_COUNT.getSimpleName();
+    }
+
+    Properties props = new Properties();
+    String propName = StreamingApplication.DT_PREFIX + StramElement.ATTR.getValue() + LogicalPlanConfiguration.KEY_SEPARATOR + attributeName;
+    props.put(propName, "5");
+
+    SimpleTestApplicationWithName app = new SimpleTestApplicationWithName();
+
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+    dagBuilder.addFromProperties(props, null);
+
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+
+    LogicalPlan dag = new LogicalPlan();
+    dagBuilder.prepareDAG(dag, app, appPath);
+
+    OperatorMeta om1 = dag.getOperatorMeta("operator1");
+
+    if (attrOnDag) {
+      Assert.assertNotEquals((Integer)5, om1.getValue(OperatorContext.CHECKPOINT_WINDOW_COUNT));
+    } else {
+      Assert.assertEquals((Integer)5, om1.getValue(OperatorContext.CHECKPOINT_WINDOW_COUNT));
+    }
+  }
+
+  /**
+   * This test should set the attribute on the operators and ports.
+   */
+  @Test
+  public void testRootLevelAmbiguousAttributeSimple()
+  {
+    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD, Context.PortContext.AUTO_RECORD,
+                                       StreamingApplication.DT_PREFIX, null, Boolean.TRUE, true, true);
+  }
+
+  /**
+   * This test should set the attribute on the operators and ports.
+   */
+  @Test
+  public void testApplicationLevelAmbiguousAttributeSimple()
+  {
+    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD, Context.PortContext.AUTO_RECORD,
+                                       StreamingApplication.DT_PREFIX + "application" + LogicalPlanConfiguration.KEY_SEPARATOR +
+                                       "*" + LogicalPlanConfiguration.KEY_SEPARATOR, null, Boolean.TRUE, true, true);
+  }
+
+  /**
+   * This should only set the attribute on the operator
+   */
+  @Test
+  public void testOperatorLevelAmbiguousAttributeSimple()
+  {
+    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD, Context.PortContext.AUTO_RECORD,
+                                       StreamingApplication.DT_PREFIX + "operator" + LogicalPlanConfiguration.KEY_SEPARATOR +
+                                       "*" + LogicalPlanConfiguration.KEY_SEPARATOR, null, Boolean.TRUE, true, false);
+  }
+
+  /**
+   * This should only set the attribute on the port
+   */
+  @Test
+  public void testPortLevelAmbiguousAttributeSimple()
+  {
+    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD, Context.PortContext.AUTO_RECORD,
+                                       StreamingApplication.DT_PREFIX + "port" + LogicalPlanConfiguration.KEY_SEPARATOR +
+                                       "*" + LogicalPlanConfiguration.KEY_SEPARATOR, null, Boolean.TRUE, false, true);
+  }
+
+  /**
+   * This test should set the attribute on the operators and ports.
+   */
+  @Test
+  public void testRootLevelAmbiguousAttributeComplex()
+  {
+    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD, Context.PortContext.AUTO_RECORD, StreamingApplication.DT_PREFIX,
+                                       PortContext.class.getCanonicalName(), Boolean.TRUE, false, true);
+  }
+
+  /**
+   * This test should set the attribute on the operators and ports.
+   */
+  @Test
+  public void testApplicationLevelAmbiguousAttributeComplex()
+  {
+    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD, Context.PortContext.AUTO_RECORD,
+                                       StreamingApplication.DT_PREFIX + "application" + LogicalPlanConfiguration.KEY_SEPARATOR +
+                                       "*" + LogicalPlanConfiguration.KEY_SEPARATOR, PortContext.class.getCanonicalName(),
+                                       Boolean.TRUE, false, true);
+  }
+
+  /**
+   * This should only set the attribute on the operator
+   */
+  @Test
+  public void testOperatorLevelAmbiguousAttributeComplex()
+  {
+    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD, Context.PortContext.AUTO_RECORD,
+                                       StreamingApplication.DT_PREFIX + "operator" + LogicalPlanConfiguration.KEY_SEPARATOR +
+                                       "*" + LogicalPlanConfiguration.KEY_SEPARATOR, OperatorContext.class.getCanonicalName(),
+                                       Boolean.TRUE, true, false);
+  }
+
+  /**
+   * This should only set the attribute on the port
+   */
+  @Test
+  public void testOperatorLevelAmbiguousAttributeComplex2()
+  {
+    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD, Context.PortContext.AUTO_RECORD,
+                                       StreamingApplication.DT_PREFIX + "operator" + LogicalPlanConfiguration.KEY_SEPARATOR +
+                                       "*" + LogicalPlanConfiguration.KEY_SEPARATOR, PortContext.class.getCanonicalName(),
+                                       Boolean.TRUE, false, true);
+  }
+
+  /**
+   * This should only set the attribute on the port
+   */
+  @Test
+  public void testPortLevelAmbiguousAttributeComplex()
+  {
+    testAttributeAmbiguousSimpleHelper(Context.OperatorContext.AUTO_RECORD, Context.PortContext.AUTO_RECORD,
+                                       StreamingApplication.DT_PREFIX + "port" + LogicalPlanConfiguration.KEY_SEPARATOR +
+                                       "*" + LogicalPlanConfiguration.KEY_SEPARATOR, PortContext.class.getCanonicalName(),
+                                       Boolean.TRUE, false, true);
+  }
+
+  private void testAttributeAmbiguousSimpleHelper(Attribute<?> attributeObjOperator, Attribute<?> attributeObjPort,
+                                                  String root, String contextClass, Object val, boolean operatorSet,
+                                                  boolean portSet)
+  {
+    Properties props = propertiesBuilder(attributeObjOperator.getSimpleName(), root, contextClass, val);
+
+    simpleAttributeOperatorHelperAssert(attributeObjOperator, props, val, operatorSet);
+
+    simpleNamePortAssertHelperAssert(attributeObjPort, props, val, portSet);
+  }
+
+  @Test
+  public void testRootLevelAttributeSimpleNameOperator()
+  {
+    simpleAttributeOperatorHelper(OperatorContext.MEMORY_MB, StreamingApplication.DT_PREFIX, true, 4096, true, true);
+  }
+
+  @Test
+  public void testRootLevelStorageAgentSimpleNameOperator()
+  {
+    MockStorageAgent mockAgent = new MockStorageAgent();
+
+    simpleAttributeOperatorHelper(OperatorContext.STORAGE_AGENT, StreamingApplication.DT_PREFIX, true, mockAgent, true, false);
+  }
+
+  @Test
+  public void testRootLevelAttributeSimpleNameOperatorNoScope()
+  {
+    simpleAttributeOperatorHelper(OperatorContext.MEMORY_MB, StreamingApplication.DT_PREFIX, true, 4096, true, false);
+  }
+
+  @Test
+  public void testApplicationLevelAttributeSimpleNameOperator()
+  {
+    simpleAttributeOperatorHelper(OperatorContext.MEMORY_MB, StreamingApplication.DT_PREFIX + "application" +
+                                  LogicalPlanConfiguration.KEY_SEPARATOR + "SimpleTestApp" + LogicalPlanConfiguration.KEY_SEPARATOR,
+                                  true, 4096, true, true);
+  }
+
+  private void simpleAttributeOperatorHelper(Attribute<?> attributeObj, String root, boolean simpleName,
+                                             Object val, boolean set, boolean scope)
+  {
+    Properties props = propertiesBuilderOperator(attributeObj.getSimpleName(), root, simpleName,
+                                                 val, scope);
+
+    simpleAttributeOperatorHelperAssert(attributeObj, props, val, set);
+  }
+
+  private void simpleAttributeOperatorHelperAssert(Attribute<?> attributeObj, Properties props, Object val, boolean set)
+  {
+    SimpleTestApplicationWithName app = new SimpleTestApplicationWithName();
+
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+    dagBuilder.addFromProperties(props, null);
+
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+
+    LogicalPlan dag = new LogicalPlan();
+    dagBuilder.prepareDAG(dag, app, appPath);
+
+    OperatorMeta om1 = dag.getOperatorMeta("operator1");
+
+    if (set) {
+      Assert.assertEquals(val, om1.getValue(attributeObj));
+    } else {
+      Assert.assertNotEquals(val, om1.getValue(attributeObj));
+    }
+
+    OperatorMeta om2 = dag.getOperatorMeta("operator2");
+
+    if (set) {
+      Assert.assertEquals(val, om2.getValue(attributeObj));
+    } else {
+      Assert.assertNotEquals(val, om2.getValue(attributeObj));
+    }
+
+    OperatorMeta om3 = dag.getOperatorMeta("operator3");
+
+    if (set) {
+      Assert.assertEquals(val, om3.getValue(attributeObj));
+    } else {
+      Assert.assertNotEquals(val, om3.getValue(attributeObj));
+    }
+  }
+
+  /* Port tests */
+  @Test
+  public void testRootLevelAttributeSimpleNamePort()
+  {
+    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX,
+                              true, (Integer)4096, true, true);
+  }
+
+  @Test
+  public void testRootLevelAttributeSimpleNamePortNoScope()
+  {
+    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX,
+                              true, (Integer)4096, true, false);
+  }
+
+  @Test
+  public void testOperatorLevelAttributeSimpleNamePort()
+  {
+    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX + "operator" +
+                              LogicalPlanConfiguration.KEY_SEPARATOR + "*" + LogicalPlanConfiguration.KEY_SEPARATOR,
+                              true, (Integer)4096, true, true);
+  }
+
+  @Test
+  public void testApplicationLevelAttributeSimpleNamePort()
+  {
+    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX + "application" +
+                              LogicalPlanConfiguration.KEY_SEPARATOR + "SimpleTestApp" + LogicalPlanConfiguration.KEY_SEPARATOR,
+                              true, (Integer)4096, true, true);
+  }
+
+  @Test
+  public void testRootLevelAttributeComplexNamePort()
+  {
+    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX, false,
+                              (Integer)4096, true, true);
+  }
+
+  @Test
+  public void testRootLevelAttributeComplexNamePortNoScope()
+  {
+    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX, false,
+                              (Integer)4096, true, false);
+  }
+
+  @Test
+  public void testOperatorLevelAttributeComplexNamePort()
+  {
+    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX + "operator" +
+                              LogicalPlanConfiguration.KEY_SEPARATOR + "*" + LogicalPlanConfiguration.KEY_SEPARATOR,
+                              false, (Integer)4096, true, true);
+  }
+
+  @Test
+  public void testApplicationLevelAttributeComplexNamePort()
+  {
+    simpleAttributePortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX + "application" +
+                              LogicalPlanConfiguration.KEY_SEPARATOR + "SimpleTestApp" + LogicalPlanConfiguration.KEY_SEPARATOR,
+                              false, (Integer)4096, true, true);
+  }
+
+  /* Input port tests */
+  @Test
+  public void testRootLevelAttributeSimpleNameInputPort()
+  {
+    simpleAttributeInputPortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX, true,
+                                   (Integer)4096, true);
+  }
+
+  @Test
+  public void testOperatorLevelAttributeSimpleNameInputPort()
+  {
+    simpleAttributeInputPortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX + "operator" + LogicalPlanConfiguration.KEY_SEPARATOR +
+                                   "*" + LogicalPlanConfiguration.KEY_SEPARATOR, true, (Integer)4096, true);
+  }
+
+  @Test
+  public void testApplicationLevelAttributeSimpleNameInputPort()
+  {
+    simpleAttributeInputPortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX + "application" +
+                                   LogicalPlanConfiguration.KEY_SEPARATOR + "SimpleTestApp" + LogicalPlanConfiguration.KEY_SEPARATOR,
+                                   true, (Integer)4096, true);
+  }
+
+  @Test
+  public void testRootLevelAttributeComplexNameInputPort()
+  {
+    simpleAttributeInputPortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX, false, (Integer)4096, true);
+  }
+
+  @Test
+  public void testOperatorLevelAttributeComplexNameInputPort()
+  {
+    simpleAttributeInputPortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX + "operator" +
+                                   LogicalPlanConfiguration.KEY_SEPARATOR + "*" + LogicalPlanConfiguration.KEY_SEPARATOR, false,
+                                   (Integer)4096, true);
+  }
+
+  @Test
+  public void testApplicationLevelAttributeComplexNameInputPort()
+  {
+    simpleAttributeInputPortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX + "application" +
+                                   LogicalPlanConfiguration.KEY_SEPARATOR + "SimpleTestApp" + LogicalPlanConfiguration.KEY_SEPARATOR,
+                                   false, (Integer)4096, true);
+  }
+
+  /* Output port tests */
+  @Test
+  public void testRootLevelAttributeSimpleNameOutputPort()
+  {
+    simpleAttributeOutputPortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX, true, (Integer)4096, true);
+  }
+
+  @Test
+  public void testOperatorLevelAttributeSimpleNameOutputPort()
+  {
+    simpleAttributeOutputPortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX + "operator" + LogicalPlanConfiguration.KEY_SEPARATOR +
+                                    "*" + LogicalPlanConfiguration.KEY_SEPARATOR, true, (Integer)4096, true);
+  }
+
+  @Test
+  public void testApplicationLevelAttributeSimpleNameOutputPort()
+  {
+    simpleAttributeOutputPortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX + "application" + LogicalPlanConfiguration.KEY_SEPARATOR +
+                                    "SimpleTestApp" + LogicalPlanConfiguration.KEY_SEPARATOR, true, (Integer)4096, true);
+  }
+
+  @Test
+  public void testRootLevelAttributeComplexNameOutputPort()
+  {
+    simpleAttributeOutputPortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX, false, (Integer)4096, true);
+  }
+
+  @Test
+  public void testOperatorLevelAttributeComplexNameOutputPort()
+  {
+    simpleAttributeOutputPortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX + "operator" + LogicalPlanConfiguration.KEY_SEPARATOR +
+                                    "*" + LogicalPlanConfiguration.KEY_SEPARATOR, false, (Integer)4096, true);
+  }
+
+  @Test
+  public void testApplicationLevelAttributeComplexNameOutputPort()
+  {
+    simpleAttributeOutputPortHelper(PortContext.QUEUE_CAPACITY, StreamingApplication.DT_PREFIX + "application" + LogicalPlanConfiguration.KEY_SEPARATOR +
+                                    "SimpleTestApp" + LogicalPlanConfiguration.KEY_SEPARATOR, false, (Integer)4096, true);
+  }
+
+  /* Helpers for building ports */
+  private void simpleAttributePortHelper(Attribute<?> attributeObj, String root, boolean simpleName, Object val, boolean set, boolean scope)
+  {
+    Properties props = propertiesBuilderPort(attributeObj.getSimpleName(), root, simpleName, val, scope);
+
+    simpleNamePortAssertHelperAssert(attributeObj, props, val, set);
+  }
+
+  private void simpleAttributeInputPortHelper(Attribute<?> attributeObj, String root, boolean simpleName, Object val, boolean set)
+  {
+    Properties props = propertiesBuilderInputPort(attributeObj.getSimpleName(), root, simpleName, val);
+
+    simpleNameInputPortAssertHelperAssert(attributeObj, props, val, set);
+    simpleNameOutputPortAssertHelperAssert(attributeObj, props, val, !set);
+  }
+
+  private void simpleAttributeOutputPortHelper(Attribute<?> attributeObj, String root, boolean simpleName, Object val, boolean set)
+  {
+    Properties props = propertiesBuilderOutputPort(attributeObj.getSimpleName(), root, simpleName, val);
+
+    simpleNameOutputPortAssertHelperAssert(attributeObj, props, val, set);
+    simpleNameInputPortAssertHelperAssert(attributeObj, props, val, !set);
+  }
+
+  private void simpleNamePortAssertHelperAssert(Attribute<?> attributeObj, Properties props, Object val, boolean set)
+  {
+    SimpleTestApplicationWithName app = new SimpleTestApplicationWithName();
+
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+    dagBuilder.addFromProperties(props, null);
+
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+
+    LogicalPlan dag = new LogicalPlan();
+    dagBuilder.prepareDAG(dag, app, appPath);
+
+    simpleNamePortAssertHelper(attributeObj, dag, "operator1", val, set);
+    simpleNamePortAssertHelper(attributeObj, dag, "operator2", val, set);
+    simpleNamePortAssertHelper(attributeObj, dag, "operator3", val, set);
+  }
+
+  private void simpleNameInputPortAssertHelperAssert(Attribute<?> attributeObj, Properties props, Object val, boolean set)
+  {
+    SimpleTestApplicationWithName app = new SimpleTestApplicationWithName();
+
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+    dagBuilder.addFromProperties(props, null);
+
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+
+    LogicalPlan dag = new LogicalPlan();
+    dagBuilder.prepareDAG(dag, app, appPath);
+
+    simpleNameInputPortAssertHelper(attributeObj, dag, "operator1", val, set);
+    simpleNameInputPortAssertHelper(attributeObj, dag, "operator2", val, set);
+    simpleNameInputPortAssertHelper(attributeObj, dag, "operator3", val, set);
+  }
+
+  private void simpleNameOutputPortAssertHelperAssert(Attribute<?> attributeObj, Properties props, Object val, boolean set)
+  {
+    SimpleTestApplicationWithName app = new SimpleTestApplicationWithName();
+
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+    dagBuilder.addFromProperties(props, null);
+
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+
+    LogicalPlan dag = new LogicalPlan();
+    dagBuilder.prepareDAG(dag, app, appPath);
+
+    simpleNameOutputPortAssertHelper(attributeObj, dag, "operator1", val, set);
+    simpleNameOutputPortAssertHelper(attributeObj, dag, "operator2", val, set);
+    simpleNameOutputPortAssertHelper(attributeObj, dag, "operator3", val, set);
+  }
+
+  private void simpleNamePortAssertHelper(Attribute<?> attributeObj, LogicalPlan dag, String operatorName, Object queueCapacity, boolean set)
+  {
+    simpleNameInputPortAssertHelper(attributeObj, dag, operatorName, queueCapacity, set);
+    simpleNameOutputPortAssertHelper(attributeObj, dag, operatorName, queueCapacity, set);
+  }
+
+  private void simpleNameInputPortAssertHelper(Attribute<?> attributeObj, LogicalPlan dag, String operatorName, Object queueCapacity, boolean set)
+  {
+    OperatorMeta operatorMeta = dag.getOperatorMeta(operatorName);
+
+    for (InputPortMeta inputPortMeta: operatorMeta.getInputStreams().keySet()) {
+      if (set) {
+        Assert.assertEquals(queueCapacity, inputPortMeta.getValue(attributeObj));
+      } else {
+        Assert.assertNotEquals(queueCapacity, inputPortMeta.getValue(attributeObj));
+      }
+    }
+  }
+
+  private void simpleNameOutputPortAssertHelper(Attribute<?> attributeObj, LogicalPlan dag, String operatorName, Object queueCapacity, boolean set)
+  {
+    OperatorMeta operatorMeta = dag.getOperatorMeta(operatorName);
+
+    for (OutputPortMeta outputPortMeta: operatorMeta.getOutputStreams().keySet()) {
+      if (set) {
+        Assert.assertEquals(queueCapacity, outputPortMeta.getValue(attributeObj));
+      } else {
+        Assert.assertNotEquals(queueCapacity, outputPortMeta.getValue(attributeObj));
+      }
+    }
+  }
+
+  /* Helpers for building properties */
+  private Properties propertiesBuilder(String attributeName, String root, String contextClass, Object val)
+  {
+    boolean simpleName = contextClass == null;
+
+    if (!simpleName) {
+      attributeName = contextClass + LogicalPlanConfiguration.KEY_SEPARATOR + attributeName;
+    }
+
+    Properties props = new Properties();
+    String propName = root + StramElement.ATTR.getValue() + LogicalPlanConfiguration.KEY_SEPARATOR + attributeName;
+    props.put(propName, val.toString());
+
+    return props;
+  }
+
+  private Properties propertiesBuilderOperator(String attributeName, String root, boolean simpleName, Object val, boolean addOperator)
+  {
+    String contextClass = simpleName ? null : OperatorContext.class.getCanonicalName();
+
+    if (addOperator) {
+      root += "operator" + LogicalPlanConfiguration.KEY_SEPARATOR + "*" + LogicalPlanConfiguration.KEY_SEPARATOR;
+    }
+
+    return propertiesBuilder(attributeName, root, contextClass, val);
+  }
+
+  private Properties propertiesBuilderPort(String attributeName, String root, boolean simpleName, Object val, boolean addPort)
+  {
+    String contextClass = simpleName ? null : PortContext.class.getCanonicalName();
+
+    if (addPort) {
+      root += "port" + LogicalPlanConfiguration.KEY_SEPARATOR + "*" + LogicalPlanConfiguration.KEY_SEPARATOR;
+    }
+
+    return propertiesBuilder(attributeName, root, contextClass, val);
+  }
+
+  private Properties propertiesBuilderInputPort(String attributeName, String root, boolean simpleName, Object val)
+  {
+    String contextClass = simpleName ? null: PortContext.class.getCanonicalName();
+
+    root += "inputport" + LogicalPlanConfiguration.KEY_SEPARATOR + "*" + LogicalPlanConfiguration.KEY_SEPARATOR;
+
+    return propertiesBuilder(attributeName, root, contextClass, val);
+  }
+
+  private Properties propertiesBuilderOutputPort(String attributeName, String root, boolean simpleName, Object val)
+  {
+    String contextClass = simpleName ? null: PortContext.class.getCanonicalName();
+
+    root += "outputport" + LogicalPlanConfiguration.KEY_SEPARATOR + "*" + LogicalPlanConfiguration.KEY_SEPARATOR;
+
+    return propertiesBuilder(attributeName, root, contextClass, val);
+  }
+
+  /**
+   * Note: If the same name is given to an Attribute in multiple Context classes, then the type of that
+   * Attribute must be the same across all of those Context classes. This is required because a simple attribute
+   * name specified at the top-level context of a properties file needs to be set in all child configurations. If
+   * multiple Attributes were declared in different Contexts with the same name but different types, then
+   * it would not be possible to set the values of Attributes referenced by a simple attribute name in the root
+   * context of a properties file. As a consequence, adding an Attribute with the same name as a pre-existing
+   * Attribute to a new Context class would be a backwards-incompatible change.
+   */
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testErrorSameAttrMultipleTypes()
+  {
+    //Trigger initialization of attributes for existing Contexts.
+    LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(new Configuration());
+
+    Exception ex = null;
+    try {
+      ContextUtils.buildAttributeMaps(Sets.newHashSet(MockContext1.class, MockContext2.class));
+    } catch (ValidationException e) {
+      ex = e;
+    }
+
+    Assert.assertNotNull(ex);
+    Assert.assertTrue(ex.getMessage().contains("is defined with two different types in two different context classes:"));
+
+    //Clear test data from Context.
+    ContextUtils.initialize();
+  }
+
+  private static final Logger logger = LoggerFactory.getLogger(LogicalPlanConfigurationTest.class);
+
+  public static class TestApplication implements StreamingApplication {
+    Integer testprop1;
+    Integer testprop2;
+    Integer testprop3;
+    TestInnerClass inncls;
+    public TestApplication() {
+      inncls=new TestInnerClass();
+    }
+
+    public Integer getTestprop1() {
+      return testprop1;
+    }
+
+    public void setTestprop1(Integer testprop1) {
+      this.testprop1 = testprop1;
+    }
+
+    public Integer getTestprop2() {
+      return testprop2;
+    }
+
+    public void setTestprop2(Integer testprop2) {
+      this.testprop2 = testprop2;
+    }
+
+    public Integer getTestprop3() {
+      return testprop3;
+    }
+
+    public void setTestprop3(Integer testprop3) {
+      this.testprop3 = testprop3;
+    }
+
+    public TestInnerClass getInncls() {
+      return inncls;
+    }
+
+    public void setInncls(TestInnerClass inncls) {
+      this.inncls = inncls;
+    }
+
+    @Override
+    public void populateDAG(DAG dag, Configuration conf) {
+
+    }
+    public class TestInnerClass{
+      Integer a;
+
+      public Integer getA() {
+        return a;
+      }
+
+      public void setA(Integer a) {
+        this.a = a;
+      }
+    }
+  }
+
+  public static class TestStatsListener implements StatsListener{
+
+    private int intProp;
+
+    public TestStatsListener()
+    {
+    }
+
+    @Override
+    public Response processStats(BatchedOperatorStats stats)
+    {
+      return null;
+    }
+
+    public int getIntProp()
+    {
+      return intProp;
+    }
+
+    public void setIntProp(int intProp)
+    {
+      this.intProp = intProp;
+    }
+
+    @Override
+    public int hashCode()
+    {
+      final int prime = 31;
+      int result = 1;
+      result = prime * result + intProp;
+      return result;
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+      if (this == obj)
+        return true;
+      if (obj == null)
+        return false;
+      if (getClass() != obj.getClass())
+        return false;
+      TestStatsListener other = (TestStatsListener) obj;
+      if (intProp != other.intProp)
+        return false;
+      return true;
+    }
+  }
+
+  public static class TestSchema
+  {
+  }
+
+  public static class SimpleTestApplication implements StreamingApplication
+  {
+    public final GenericTestOperator gt1 = new GenericTestOperator();
+    public final GenericTestOperator gt2 = new GenericTestOperator();
+    public final GenericTestOperator gt3 = new GenericTestOperator();
+
+    @Override
+    public void populateDAG(DAG dag, Configuration conf)
+    {
+      dag.addOperator("operator1", gt1);
+      dag.addOperator("operator2", gt2);
+      dag.addOperator("operator3", gt3);
+      dag.addStream("s1", gt1.outport1, gt2.inport1);
+      dag.addStream("s2", gt2.outport1, gt3.inport1, gt3.inport2);
+    }
+  };
+
+  public static interface MockContext1 extends Context
+  {
+    /**
+     * Mock attribute used to verify that same-named attributes must share a type across Context classes.
+     */
+    Attribute<Integer> TEST_ATTR = new Attribute<>(1024);
+
+    @SuppressWarnings("FieldNameHidesFieldInSuperclass")
+    long serialVersionUID = AttributeMap.AttributeInitializer.initialize(MockContext1.class);
+  }
+
+  public static interface MockContext2 extends Context
+  {
+    /**
+     * Mock attribute with the same name as MockContext1.TEST_ATTR but a different type, used to trigger the validation error.
+     */
+    Attribute<Boolean> TEST_ATTR = new Attribute<>(false);
+
+    @SuppressWarnings("FieldNameHidesFieldInSuperclass")
+    long serialVersionUID = AttributeMap.AttributeInitializer.initialize(MockContext2.class);
+  }
+
+  @ApplicationAnnotation(name="SimpleTestApp")
+  public static class SimpleTestApplicationWithName extends SimpleTestApplication
+  {
+  };
+
+  private static final Logger LOG = LoggerFactory.getLogger(LogicalPlanConfigurationTest.class);
+}
+
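
For orientation, the port-attribute helpers above reduce to plain property keys. A minimal, self-contained sketch of the two key shapes the tests exercise (the class name is hypothetical, and it assumes StreamingApplication.DT_PREFIX resolves to "dt." and LogicalPlanConfiguration.KEY_SEPARATOR to "."):

import java.util.Properties;

public class PortAttributeKeys
{
  public static void main(String[] args)
  {
    Properties props = new Properties();
    // simple attribute name, applied to the input ports of every operator via wildcards
    props.put("dt.operator.*.inputport.*.attr.QUEUE_CAPACITY", "4096");
    // fully qualified ("complex") form that pins the attribute to PortContext
    props.put("dt.operator.*.inputport.*.attr.com.datatorrent.api.Context.PortContext.QUEUE_CAPACITY", "4096");
    props.list(System.out);
  }
}

The simple form only works because an attribute name is required to be unambiguous across Context classes, which is exactly the constraint the testErrorSameAttrMultipleTypes case above verifies.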


[23/50] [abbrv] incubator-apex-core git commit: APEX-117 #resolve metrics aggregation triggered within app window

Posted by vr...@apache.org.
APEX-117 #resolve metrics aggregation triggered within app window


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/77e693c2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/77e693c2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/77e693c2

Branch: refs/heads/feature-module
Commit: 77e693c26962f90803b87212a512bfc3dd03c96b
Parents: 760039e
Author: Chandni Singh <cs...@apache.org>
Authored: Mon Sep 14 00:41:32 2015 -0700
Committer: Chandni Singh <cs...@apache.org>
Committed: Mon Sep 14 11:40:20 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/StreamingContainerManager.java    | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/77e693c2/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
index 41738f4..6f36be6 100644
--- a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
+++ b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
@@ -842,6 +842,10 @@ public class StreamingContainerManager implements PlanContext
           metricPool.add(physicalMetrics);
         }
       }
+      if (metricPool.isEmpty()) {
+        //nothing to aggregate
+        continue;
+      }
       Map<String, Object> lm = aggregator.aggregate(windowId, metricPool);
 
       if (lm != null && lm.size() > 0) {
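
The isEmpty() guard is the usual skip-empty-input check before handing data to an aggregator: aggregations such as averages are undefined over an empty pool. A standalone sketch, with hypothetical names rather than the stram classes, of the kind of failure the guard avoids:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MetricPoolExample
{
  // hypothetical averaging aggregator: dividing by pool.size() fails when the pool is empty
  static Map<String, Object> aggregate(long windowId, List<Map<String, Object>> pool)
  {
    long sum = 0;
    for (Map<String, Object> metrics : pool) {
      sum += ((Number)metrics.get("tuplesProcessed")).longValue();
    }
    Map<String, Object> result = new HashMap<String, Object>();
    result.put("avgTuplesProcessed", sum / pool.size());  // ArithmeticException on an empty pool
    return result;
  }

  // mirror of the short-circuit the patch adds with continue
  static Map<String, Object> safeAggregate(long windowId, List<Map<String, Object>> pool)
  {
    return pool.isEmpty() ? null : aggregate(windowId, pool);
  }
}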


[33/50] [abbrv] incubator-apex-core git commit: APEX-81 #resolve Fix netlet snapshot dependency. APEX-122 #resolve Enforce no dependencies on snapshot versions in release builds.

Posted by vr...@apache.org.
APEX-81 #resolve Fix netlet snapshot dependency. APEX-122 #resolve Enforce no dependencies on snapshot versions in release builds.


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/8578a714
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/8578a714
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/8578a714

Branch: refs/heads/feature-module
Commit: 8578a714193196f485c5cec4e380373215679619
Parents: 2cd917d
Author: Vlad Rozov <v....@datatorrent.com>
Authored: Tue Sep 15 09:22:22 2015 -0700
Committer: Vlad Rozov <v....@datatorrent.com>
Committed: Tue Sep 15 13:41:51 2015 -0700

----------------------------------------------------------------------
 api/pom.xml | 2 +-
 pom.xml     | 6 +++++-
 2 files changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8578a714/api/pom.xml
----------------------------------------------------------------------
diff --git a/api/pom.xml b/api/pom.xml
index 7fe08c0..a35d934 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -114,7 +114,7 @@
     <dependency>
       <groupId>${project.groupId}</groupId>
       <artifactId>netlet</artifactId>
-      <version>1.2.0-SNAPSHOT</version>
+      <version>1.2.0</version>
     </dependency>
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8578a714/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 484798b..ad3a28d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -102,7 +102,7 @@
       </plugin>
       <plugin>
         <artifactId>maven-enforcer-plugin</artifactId>
-        <version>1.0.1</version>
+        <version>1.4.1</version>
         <executions>
           <execution>
             <id>enforce-tools</id>
@@ -117,6 +117,10 @@
                 <requireMavenVersion>
                   <version>[3.0.2,)</version>
                 </requireMavenVersion>
+                <requireReleaseDeps>
+                  <message>Snapshots are not allowed</message>
+                  <onlyWhenRelease>true</onlyWhenRelease>
+                </requireReleaseDeps>
               </rules>
             </configuration>
           </execution>


[13/50] [abbrv] incubator-apex-core git commit: Merge branch 'APEX-112' into devel-3

Posted by vr...@apache.org.
Merge branch 'APEX-112' into devel-3


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/97cbef6c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/97cbef6c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/97cbef6c

Branch: refs/heads/feature-module
Commit: 97cbef6c7294e80f8966fd812fd1b642cba477cf
Parents: 6c24259 ac25fba
Author: Chandni Singh <cs...@apache.org>
Authored: Thu Sep 10 17:38:39 2015 -0700
Committer: Chandni Singh <cs...@apache.org>
Committed: Thu Sep 10 17:38:39 2015 -0700

----------------------------------------------------------------------
 .../src/main/java/com/datatorrent/stram/StringCodecs.java   | 9 ++++++---
 .../java/com/datatorrent/stram/webapp/StramWebServices.java | 4 ++--
 2 files changed, 8 insertions(+), 5 deletions(-)
----------------------------------------------------------------------



[28/50] [abbrv] incubator-apex-core git commit: APEX-126 #comment made sure that handleIdleTime is called only inside window boundaries

Posted by vr...@apache.org.
APEX-126 #comment made sure that handleIdleTime is called only inside window boundaries


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/7801b7a3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/7801b7a3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/7801b7a3

Branch: refs/heads/feature-module
Commit: 7801b7a3af777afc26cda26fcbb0f11c25d3cbcd
Parents: c0baa9d
Author: Gaurav <ga...@datatorrent.com>
Authored: Tue Sep 15 10:02:06 2015 -0700
Committer: Gaurav <ga...@datatorrent.com>
Committed: Tue Sep 15 10:02:06 2015 -0700

----------------------------------------------------------------------
 engine/src/main/java/com/datatorrent/stram/engine/GenericNode.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/7801b7a3/engine/src/main/java/com/datatorrent/stram/engine/GenericNode.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/engine/GenericNode.java b/engine/src/main/java/com/datatorrent/stram/engine/GenericNode.java
index 2a56a49..20795ce 100644
--- a/engine/src/main/java/com/datatorrent/stram/engine/GenericNode.java
+++ b/engine/src/main/java/com/datatorrent/stram/engine/GenericNode.java
@@ -512,7 +512,7 @@ public class GenericNode extends Node<Operator>
           }
 
           if (need2sleep) {
-            if (handleIdleTime) {
+            if (handleIdleTime && insideWindow) {
               ((IdleTimeHandler) operator).handleIdleTime();
             }
             else {
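
For context, handleIdleTime() is the callback an operator opts into through the IdleTimeHandler interface that GenericNode casts to above; with this change the engine invokes it only between beginWindow() and endWindow(). A minimal sketch of such an operator, assuming the com.datatorrent.api 3.x interfaces (class and field names are illustrative only):

import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.Operator;

public class IdleAwareOperator implements Operator, Operator.IdleTimeHandler
{
  private transient int idleCallsThisWindow;

  @Override
  public void setup(OperatorContext context)
  {
  }

  @Override
  public void beginWindow(long windowId)
  {
    idleCallsThisWindow = 0;
  }

  @Override
  public void handleIdleTime()
  {
    // only runs between beginWindow() and endWindow() after the insideWindow check above,
    // so per-window state such as this counter can be updated safely
    idleCallsThisWindow++;
  }

  @Override
  public void endWindow()
  {
  }

  @Override
  public void teardown()
  {
  }
}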


[17/50] [abbrv] incubator-apex-core git commit: APEX-120 #comment deleting temp folder

Posted by vr...@apache.org.
APEX-120 #comment deleting temp folder


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/760039e8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/760039e8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/760039e8

Branch: refs/heads/feature-module
Commit: 760039e81ec72de0d462b0d16f5961ed35648e83
Parents: c349090
Author: Gaurav <ga...@datatorrent.com>
Authored: Sat Sep 12 21:35:26 2015 -0700
Committer: Gaurav <ga...@datatorrent.com>
Committed: Sat Sep 12 21:35:26 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java   | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/760039e8/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java b/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java
index a1504e4..892d221 100644
--- a/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java
+++ b/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java
@@ -63,6 +63,7 @@ public class AsyncFSStorageAgentTest
     {
       try {
         FileUtils.deleteDirectory(new File("target/" + description.getClassName()));
+        FileUtils.deleteDirectory(new File(FileUtils.getTempDirectory(), "localcheckpoint"));
       } catch (IOException e) {
         throw new RuntimeException(e);
       }


[40/50] [abbrv] incubator-apex-core git commit: Bumped minimum maven version to 3.0.5. Split enforcer executions by enforced rule.

Posted by vr...@apache.org.
Bumped minimum maven version to 3.0.5. Split enforcer executions by enforced rule.


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/b7187837
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/b7187837
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/b7187837

Branch: refs/heads/feature-module
Commit: b71878376379a9fd4a02a7cca239c5bc5dd578c5
Parents: 7503dde
Author: Vlad Rozov <v....@datatorrent.com>
Authored: Wed Sep 16 16:17:40 2015 -0700
Committer: Vlad Rozov <v....@datatorrent.com>
Committed: Wed Sep 16 16:17:40 2015 -0700

----------------------------------------------------------------------
 pom.xml | 72 +++++++++++++++++++++++++++++++++++++++++-------------------
 1 file changed, 49 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b7187837/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index ad3a28d..87c34f1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -102,29 +102,6 @@
       </plugin>
       <plugin>
         <artifactId>maven-enforcer-plugin</artifactId>
-        <version>1.4.1</version>
-        <executions>
-          <execution>
-            <id>enforce-tools</id>
-            <goals>
-              <goal>enforce</goal>
-            </goals>
-            <configuration>
-              <rules>
-                <requireJavaVersion>
-                  <version>[1.7.0,)</version>
-                </requireJavaVersion>
-                <requireMavenVersion>
-                  <version>[3.0.2,)</version>
-                </requireMavenVersion>
-                <requireReleaseDeps>
-                  <message>Snapshots are not allowed</message>
-                  <onlyWhenRelease>true</onlyWhenRelease>
-                </requireReleaseDeps>
-              </rules>
-            </configuration>
-          </execution>
-        </executions>
       </plugin>
       <plugin>
         <artifactId>maven-surefire-plugin</artifactId>
@@ -209,6 +186,55 @@
           <artifactId>exec-maven-plugin</artifactId>
           <version>1.2.1</version>
         </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-enforcer-plugin</artifactId>
+          <version>1.4.1</version>
+          <executions>
+            <execution>
+              <id>enforce-java</id>
+              <goals>
+                <goal>enforce</goal>
+              </goals>
+              <configuration>
+                <rules>
+                  <requireJavaVersion>
+                    <version>[1.7.0,)</version>
+                    <message>Build requires Java 1.7 or above.</message>
+                  </requireJavaVersion>
+                </rules>
+              </configuration>
+            </execution>
+            <execution>
+              <id>enforce-maven</id>
+              <goals>
+                <goal>enforce</goal>
+              </goals>
+              <configuration>
+                <rules>
+                  <requireMavenVersion>
+                    <version>[3.0.5,)</version>
+                    <message>Build requires Maven 3.0.5 or above.</message>
+                  </requireMavenVersion>
+                </rules>
+              </configuration>
+            </execution>
+            <execution>
+              <id>enforce-release-dependencies</id>
+              <goals>
+                <goal>enforce</goal>
+              </goals>
+              <configuration>
+                <rules>
+                  <requireReleaseDeps>
+                    <message>Snapshots dependencies are not allowed for release build.</message>
+                    <onlyWhenRelease>true</onlyWhenRelease>
+                  </requireReleaseDeps>
+                </rules>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
       </plugins>
     </pluginManagement>
     <extensions>


[29/50] [abbrv] incubator-apex-core git commit: APEX-127 #resolve #comment when a method had no comments and only the omitFromUI tag, the superclass method info was being used

Posted by vr...@apache.org.
APEX-127 #resolve #comment when a method had no comments and only the omitFromUI tag, the superclass method info was being used


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/b1666b77
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/b1666b77
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/b1666b77

Branch: refs/heads/feature-module
Commit: b1666b778b54667023f5beba43e07227eb90298b
Parents: c0baa9d
Author: Chandni Singh <cs...@apache.org>
Authored: Mon Sep 14 17:24:09 2015 -0700
Committer: Chandni Singh <cs...@apache.org>
Committed: Tue Sep 15 11:20:43 2015 -0700

----------------------------------------------------------------------
 .../stram/webapp/OperatorDiscoverer.java        | 66 ++++++++++++++------
 .../stram/webapp/OperatorDiscoveryTest.java     |  9 +++
 2 files changed, 56 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b1666b77/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java b/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
index f08a96b..5c2c580 100644
--- a/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
+++ b/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
@@ -25,7 +25,6 @@ import com.datatorrent.stram.webapp.asm.CompactFieldNode;
 import com.google.common.base.Predicate;
 import com.google.common.base.Splitter;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
@@ -75,8 +74,6 @@ public class OperatorDiscoverer
   public static final String PORT_TYPE_INFO_KEY = "portTypeInfo";
   private final TypeGraph typeGraph = TypeGraphFactory.createTypeGraphProtoType();
 
-  private static final String USE_SCHEMA_TAG = "@useSchema";
-  private static final String DESCRIPTION_TAG = "@description";
   private static final Pattern WHITESPACE_PATTERN = Pattern.compile("\\s+?");
 
   private static final String SCHEMA_REQUIRED_KEY = "schemaRequired";
@@ -88,7 +85,6 @@ public class OperatorDiscoverer
     final Map<String, String> tags = new HashMap<String, String>();
     final Map<String, MethodInfo> getMethods = Maps.newHashMap();
     final Map<String, MethodInfo> setMethods = Maps.newHashMap();
-    final Set<String> invisibleGetSetMethods = new HashSet<String>();
     final Map<String, String> fields = new HashMap<String, String>();
   }
 
@@ -97,6 +93,34 @@ public class OperatorDiscoverer
     Map<String, String> descriptions = Maps.newHashMap();
     Map<String, String> useSchemas = Maps.newHashMap();
     String comment;
+    boolean omitFromUI;
+  }
+
+  enum MethodTagType
+  {
+    USE_SCHEMA("@useSchema"),
+    DESCRIPTION("@description"),
+    OMIT_FROM_UI("@omitFromUI");
+
+    private static final Map<String, MethodTagType> TAG_TEXT_MAPPING = Maps.newHashMap();
+
+    static {
+      for (MethodTagType type : MethodTagType.values()) {
+        TAG_TEXT_MAPPING.put(type.tag, type);
+      }
+    }
+
+    private final String tag;
+
+    MethodTagType(String tag)
+    {
+      this.tag = tag;
+    }
+
+    static MethodTagType from(String tag)
+    {
+      return TAG_TEXT_MAPPING.get(tag);
+    }
   }
 
   private class JavadocSAXHandler extends DefaultHandler {
@@ -139,12 +163,9 @@ public class OperatorDiscoverer
             boolean lSetterCheck = !lGetterCheck && isSetter(methodName);
 
             if (lGetterCheck || lSetterCheck) {
-              if ("@omitFromUI".equals(tagName)) {
-                oci.invisibleGetSetMethods.add(methodName);
-              } else if (DESCRIPTION_TAG.equals(tagName)) {
-                addTagToMethod(lGetterCheck ? oci.getMethods : oci.setMethods, tagText, true);
-              } else if (USE_SCHEMA_TAG.equals(tagName)) {
-                addTagToMethod(lGetterCheck ? oci.getMethods : oci.setMethods, tagText, false);
+              MethodTagType type = MethodTagType.from(tagName);
+              if (type != null) {
+                addTagToMethod(lGetterCheck ? oci.getMethods : oci.setMethods, tagText, type);
               }
             }
 //            if ("@return".equals(tagName) && isGetter(methodName)) {
@@ -168,17 +189,21 @@ public class OperatorDiscoverer
       }
     }
 
-    private void addTagToMethod(Map<String, MethodInfo> methods, String tagText, boolean isDescription)
+    private void addTagToMethod(Map<String, MethodInfo> methods, String tagText, MethodTagType tagType)
     {
       MethodInfo mi = methods.get(methodName);
       if (mi == null) {
         mi = new MethodInfo();
         methods.put(methodName, mi);
       }
+      if (tagType == MethodTagType.OMIT_FROM_UI) {
+        mi.omitFromUI = true;
+        return;
+      }
       String[] tagParts = Iterables.toArray(Splitter.on(WHITESPACE_PATTERN).trimResults().omitEmptyStrings().
         limit(2).split(tagText), String.class);
       if (tagParts.length == 2) {
-        if (isDescription) {
+        if (tagType == MethodTagType.DESCRIPTION) {
           mi.descriptions.put(tagParts[0], tagParts[1]);
         } else {
           mi.useSchemas.put(tagParts[0], tagParts[1]);
@@ -388,7 +413,7 @@ public class OperatorDiscoverer
       }
     });
 
-    if (searchTerm == null && parent == Operator.class.getName()) {
+    if (searchTerm == null && parent.equals(Operator.class.getName())) {
       return filteredClass;
     }
 
@@ -398,7 +423,7 @@ public class OperatorDiscoverer
 
     Set<String> result = new HashSet<String>();
     for (String clazz : filteredClass) {
-      if (parent == Operator.class.getName() || typeGraph.isAncestor(parent, clazz)) {
+      if (parent.equals(Operator.class.getName()) || typeGraph.isAncestor(parent, clazz)) {
         if (searchTerm == null) {
           result.add(clazz);
         } else {
@@ -595,13 +620,16 @@ public class OperatorDiscoverer
         result.put(propJ);
         continue;
       }
-      if (oci.invisibleGetSetMethods.contains(getPrefix + propName) || oci.invisibleGetSetMethods.contains(setPrefix + propName)) {
+      MethodInfo setterInfo = oci.setMethods.get(setPrefix + propName);
+      MethodInfo getterInfo = oci.getMethods.get(getPrefix + propName);
+
+      if ((getterInfo != null && getterInfo.omitFromUI) || (setterInfo != null && setterInfo.omitFromUI)) {
         continue;
       }
-      MethodInfo methodInfo = oci.setMethods.get(setPrefix + propName);
-      methodInfo = methodInfo == null ? oci.getMethods.get(getPrefix + propName) : methodInfo;
-      if (methodInfo != null) {
-        addTagsToProperties(methodInfo, propJ);
+      if (setterInfo != null) {
+        addTagsToProperties(setterInfo, propJ);
+      } else if (getterInfo != null) {
+        addTagsToProperties(getterInfo, propJ);
       }
       result.put(propJ);
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b1666b77/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java b/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
index 9af7d34..7dca6cd 100644
--- a/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
@@ -1126,5 +1126,14 @@ public class OperatorDiscoveryTest
 
     Assert.assertEquals("number of port types with schema", 0, portsWithSchemaClasses.length());
   }
+
+  @Test
+  public void testMethodType()
+  {
+    Assert.assertEquals("@omitFromUI", OperatorDiscoverer.MethodTagType.OMIT_FROM_UI, OperatorDiscoverer.MethodTagType.from("@omitFromUI"));
+    Assert.assertEquals("@useSchema", OperatorDiscoverer.MethodTagType.USE_SCHEMA, OperatorDiscoverer.MethodTagType.from("@useSchema"));
+    Assert.assertEquals("@description", OperatorDiscoverer.MethodTagType.DESCRIPTION, OperatorDiscoverer.MethodTagType.from("@description"));
+    Assert.assertEquals("@random", null, OperatorDiscoverer.MethodTagType.from("@random"));
+  }
 }
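
The MethodTagType introduced here uses the standard reverse-lookup-map-inside-an-enum idiom, with from() returning null for unrecognized javadoc tags, as the new testMethodType case asserts. The same pattern in isolation:

import java.util.HashMap;
import java.util.Map;

public enum Tag
{
  USE_SCHEMA("@useSchema"),
  DESCRIPTION("@description"),
  OMIT_FROM_UI("@omitFromUI");

  // built once when the enum class initializes; the constants already exist when this block runs
  private static final Map<String, Tag> BY_TEXT = new HashMap<String, Tag>();

  static {
    for (Tag t : values()) {
      BY_TEXT.put(t.text, t);
    }
  }

  private final String text;

  Tag(String text)
  {
    this.text = text;
  }

  public static Tag from(String text)
  {
    return BY_TEXT.get(text);  // null for unknown tags, mirroring MethodTagType.from()
  }
}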
 


[50/50] [abbrv] incubator-apex-core git commit: Merge branch 'SPOI-6333' of github.com:amberarrow/incubator-apex-core into amberarrow-SPOI-6333

Posted by vr...@apache.org.
Merge branch 'SPOI-6333' of github.com:amberarrow/incubator-apex-core into amberarrow-SPOI-6333


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/507fac34
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/507fac34
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/507fac34

Branch: refs/heads/feature-module
Commit: 507fac34baebdd5cc2e96a4baf29a77fc12bd9fe
Parents: de1d003 bb11094
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Mon Sep 21 11:31:54 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Mon Sep 21 11:31:54 2015 -0700

----------------------------------------------------------------------
 .../main/java/__packageInPathFormat__/RandomNumberGenerator.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------



[08/50] [abbrv] incubator-apex-core git commit: APEX-102 #resolve #comment converting time-buckets and dimensions to lists

Posted by vr...@apache.org.
APEX-102 #resolve #comment converting time-buckets and dimensions to lists


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/b57972bd
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/b57972bd
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/b57972bd

Branch: refs/heads/feature-module
Commit: b57972bd336f8db6f41c72d2c8c7c95ff5ca934b
Parents: 7888aa2
Author: Chandni Singh <cs...@apache.org>
Authored: Thu Sep 10 09:48:41 2015 -0700
Committer: Chandni Singh <cs...@apache.org>
Committed: Thu Sep 10 09:48:41 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/appdata/AppDataPushAgent.java     | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b57972bd/engine/src/main/java/com/datatorrent/stram/appdata/AppDataPushAgent.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/appdata/AppDataPushAgent.java b/engine/src/main/java/com/datatorrent/stram/appdata/AppDataPushAgent.java
index 3ddb313..52ce622 100644
--- a/engine/src/main/java/com/datatorrent/stram/appdata/AppDataPushAgent.java
+++ b/engine/src/main/java/com/datatorrent/stram/appdata/AppDataPushAgent.java
@@ -261,14 +261,14 @@ public class AppDataPushAgent extends AbstractService
         valueSchema.put("type", type == null ? metricValue.getClass().getCanonicalName() : type);
         String[] dimensionAggregators = metricAggregatorMeta.getDimensionAggregatorsFor(metricName);
         if (dimensionAggregators != null) {
-          valueSchema.put("dimensionAggregators", dimensionAggregators);
+          valueSchema.put("dimensionAggregators", Arrays.asList(dimensionAggregators));
         }
         valueSchemas.put(valueSchema);
       }
       result.put("values", valueSchemas);
       String[] timeBuckets = metricAggregatorMeta.getTimeBuckets();
       if (timeBuckets != null) {
-        result.put("timeBuckets", timeBuckets);
+        result.put("timeBuckets", Arrays.asList(timeBuckets));
       }
 
     } catch (JSONException ex) {
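
The likely motivation for the wrapping: a serializer that falls back on Object.toString() renders a bare Java array as an opaque type-and-hash string, whereas a List keeps its elements. A pure-JDK sketch of that difference (the JSON library itself is not reproduced here):

import java.util.Arrays;

public class ArrayVsList
{
  public static void main(String[] args)
  {
    String[] timeBuckets = {"1m", "1h", "1d"};
    // a bare array prints via Object.toString(), e.g. [Ljava.lang.String;@6d06d69c
    System.out.println(timeBuckets);
    // a List prints element by element: [1m, 1h, 1d]
    System.out.println(Arrays.asList(timeBuckets));
  }
}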


[15/50] [abbrv] incubator-apex-core git commit: APEX-113 #resolve

Posted by vr...@apache.org.
APEX-113 #resolve


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/4023ce01
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/4023ce01
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/4023ce01

Branch: refs/heads/feature-module
Commit: 4023ce01d9bd9b64bf7034e4bfcf591e4409febb
Parents: 928d368
Author: Gaurav <ga...@datatorrent.com>
Authored: Fri Sep 11 09:39:19 2015 -0700
Committer: Gaurav <ga...@datatorrent.com>
Committed: Fri Sep 11 09:39:19 2015 -0700

----------------------------------------------------------------------
 engine/src/main/java/com/datatorrent/stram/StramClient.java | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/4023ce01/engine/src/main/java/com/datatorrent/stram/StramClient.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StramClient.java b/engine/src/main/java/com/datatorrent/stram/StramClient.java
index db36ef6..700002a 100644
--- a/engine/src/main/java/com/datatorrent/stram/StramClient.java
+++ b/engine/src/main/java/com/datatorrent/stram/StramClient.java
@@ -550,6 +550,9 @@ public class StramClient
       if (dag.getMasterJVMOptions() != null) {
         vargs.add(dag.getMasterJVMOptions());
       }
+      Path tmpDir = new Path(ApplicationConstants.Environment.PWD.$(),
+        YarnConfiguration.DEFAULT_CONTAINER_TEMP_DIR);
+      vargs.add("-Djava.io.tmpdir=" + tmpDir);
       vargs.add("-Xmx" + (amMemory * 3 / 4) + "m");
       vargs.add("-XX:+HeapDumpOnOutOfMemoryError");
       vargs.add("-XX:HeapDumpPath=/tmp/dt-heap-" + appId.getId() + ".bin");


[31/50] [abbrv] incubator-apex-core git commit: Merge branch 'APEX-127' into develmerge

Posted by vr...@apache.org.
Merge branch 'APEX-127' into develmerge


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/2cd917d9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/2cd917d9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/2cd917d9

Branch: refs/heads/feature-module
Commit: 2cd917d9b86471ef50ed69e286faddf9212a871e
Parents: 0936445 b1666b7
Author: siyuan <si...@datatorrent.com>
Authored: Tue Sep 15 12:24:28 2015 -0700
Committer: siyuan <si...@datatorrent.com>
Committed: Tue Sep 15 12:24:28 2015 -0700

----------------------------------------------------------------------
 .../stram/webapp/OperatorDiscoverer.java        | 66 ++++++++++++++------
 .../stram/webapp/OperatorDiscoveryTest.java     |  9 +++
 2 files changed, 56 insertions(+), 19 deletions(-)
----------------------------------------------------------------------



[47/50] [abbrv] incubator-apex-core git commit: APEX-42: Added support for configuring unifier attributes through the configuration file

Posted by vr...@apache.org.
APEX-42: Added support for configuring unifier attributes through the configuration file


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/9b78c67b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/9b78c67b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/9b78c67b

Branch: refs/heads/feature-module
Commit: 9b78c67b68dd42b8216457e886c3cf221b9275b2
Parents: 282c43b
Author: Chaitanya <ch...@datatorrent.com>
Authored: Mon Sep 21 17:30:00 2015 +0530
Committer: Chaitanya <ch...@datatorrent.com>
Committed: Mon Sep 21 18:13:17 2015 +0530

----------------------------------------------------------------------
 .../plan/logical/LogicalPlanConfiguration.java  | 37 ++++++++++++++++---
 .../logical/LogicalPlanConfigurationTest.java   | 39 ++++++++++++++++++++
 2 files changed, 71 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/9b78c67b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
index 7a53cd7..6b141bc 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
@@ -120,7 +120,7 @@ public class LogicalPlanConfiguration {
    */
   protected enum StramElement {
     APPLICATION("application"), GATEWAY("gateway"), TEMPLATE("template"), OPERATOR("operator"),STREAM("stream"), PORT("port"), INPUT_PORT("inputport"),OUTPUT_PORT("outputport"),
-    ATTR("attr"), PROP("prop"),CLASS("class"),PATH("path");
+    ATTR("attr"), PROP("prop"),CLASS("class"),PATH("path"),UNIFIER("unifier");
     private final String value;
 
     /**
@@ -168,7 +168,8 @@ public class LogicalPlanConfiguration {
     GATEWAY(StramElement.GATEWAY, ConfElement.APPLICATION, null, null),
     OPERATOR(StramElement.OPERATOR, ConfElement.APPLICATION, null, OperatorContext.class),
     STREAM(StramElement.STREAM, ConfElement.APPLICATION, null, null),
-    PORT(StramElement.PORT, ConfElement.OPERATOR, EnumSet.of(StramElement.INPUT_PORT, StramElement.OUTPUT_PORT), PortContext.class);
+    PORT(StramElement.PORT, ConfElement.OPERATOR, EnumSet.of(StramElement.INPUT_PORT, StramElement.OUTPUT_PORT), PortContext.class),
+    UNIFIER(StramElement.UNIFIER, ConfElement.PORT, null, null);
 
     protected static final Map<StramElement, ConfElement> STRAM_ELEMENT_TO_CONF_ELEMENT = Maps.newHashMap();
     protected static final Map<Class<? extends Context>, ConfElement> CONTEXT_TO_CONF_ELEMENT = Maps.newHashMap();
@@ -182,6 +183,7 @@ public class LogicalPlanConfiguration {
       STRAM.setChildren(Sets.newHashSet(APPLICATION, TEMPLATE));
       APPLICATION.setChildren(Sets.newHashSet(GATEWAY, OPERATOR, STREAM));
       OPERATOR.setChildren(Sets.newHashSet(PORT));
+      PORT.setChildren(Sets.newHashSet(UNIFIER));
 
       STRAM_ELEMENT_TO_CONF_ELEMENT.clear();
 
@@ -1126,7 +1128,7 @@ public class LogicalPlanConfiguration {
     private final Map<String, String> appAliases = Maps.newHashMap();
 
     private static final StramElement[] CHILD_ELEMENTS = new StramElement[]{StramElement.APPLICATION, StramElement.GATEWAY, StramElement.TEMPLATE, StramElement.OPERATOR,
-            StramElement.PORT, StramElement.INPUT_PORT, StramElement.OUTPUT_PORT, StramElement.STREAM, StramElement.TEMPLATE, StramElement.ATTR};
+            StramElement.PORT, StramElement.INPUT_PORT, StramElement.OUTPUT_PORT, StramElement.STREAM, StramElement.TEMPLATE, StramElement.ATTR, StramElement.UNIFIER};
 
     StramConf() {
     }
@@ -1151,7 +1153,7 @@ public class LogicalPlanConfiguration {
 
     private static final StramElement[] CHILD_ELEMENTS = new StramElement[]{StramElement.GATEWAY, StramElement.OPERATOR, StramElement.PORT,
             StramElement.INPUT_PORT, StramElement.OUTPUT_PORT, StramElement.STREAM, StramElement.ATTR, StramElement.CLASS, StramElement.PATH,
-            StramElement.PROP};
+            StramElement.PROP, StramElement.UNIFIER};
 
     @SuppressWarnings("unused")
     AppConf() {
@@ -1446,7 +1448,7 @@ public class LogicalPlanConfiguration {
    */
   private static class PortConf extends Conf {
 
-    private static final StramElement[] CHILD_ELEMENTS = new StramElement[] {StramElement.ATTR};
+    private static final StramElement[] CHILD_ELEMENTS = new StramElement[] {StramElement.ATTR, StramElement.UNIFIER};
 
     @SuppressWarnings("unused")
     PortConf() {
@@ -1477,6 +1479,7 @@ public class LogicalPlanConfiguration {
     elementMaps.put(StramElement.PORT, PortConf.class);
     elementMaps.put(StramElement.INPUT_PORT, PortConf.class);
     elementMaps.put(StramElement.OUTPUT_PORT, PortConf.class);
+    elementMaps.put(StramElement.UNIFIER, OperatorConf.class);
   }
 
   /**
@@ -1746,6 +1749,8 @@ public class LogicalPlanConfiguration {
         parseAppElement(index, keys, element, conf, propertyName, propertyValue);
       } else if (element == StramElement.GATEWAY) {
         parseGatewayElement(element, conf, keys, index, propertyName, propertyValue);
+      } else if ((element == StramElement.UNIFIER)) {
+        parseUnifierElement(element, conf, keys, index, propertyName, propertyValue);
       } else if ((element == StramElement.ATTR) || ((element == null) && (conf.getDefaultChildElement() == StramElement.ATTR))) {
         parseAttributeElement(element, keys, index, conf, propertyValue, propertyName);
       } else if ((element == StramElement.PROP) || ((element == null) && (conf.getDefaultChildElement() == StramElement.PROP))) {
@@ -1798,6 +1803,24 @@ public class LogicalPlanConfiguration {
   }
 
   /**
+   * This is a helper method for {@link #parseStramPropertyTokens} which is responsible for parsing a unifier element.
+   * @param element The current {@link StramElement} of the property being parsed.
+   * @param keys The keys that the property being parsed was split into.
+   * @param index The current key that the parser is on.
+   * @param propertyValue The value associated with the property being parsed.
+   * @param propertyName The complete unprocessed name of the property being parsed.
+   */
+  private void parseUnifierElement(StramElement element, Conf conf1, String[] keys, int index, String propertyName, String propertyValue)
+  {
+    Conf elConf = addConf(element, null, conf1);
+    if (elConf != null) {
+      parseStramPropertyTokens(keys, index+1, propertyName, propertyValue, elConf);
+    } else {
+      LOG.error("Invalid configuration key: {}", propertyName);
+    }
+  }
+
+  /**
    * This is a helper method for {@link #parseStramPropertyTokens} which is responsible for parsing an attribute.
    * @param element The current {@link StramElement} of the property being parsed.
    * @param keys The keys that the property being parsed was split into.
@@ -2292,6 +2315,10 @@ public class LogicalPlanConfiguration {
         List<PortConf> portConfs = getMatchingChildConf(opConfs, om.getPortName(), StramElement.PORT);
         outPortConfs.addAll(portConfs);
         setAttributes(outPortConfs, om.getAttributes());
+        List<OperatorConf> unifConfs = getMatchingChildConf(outPortConfs, null, StramElement.UNIFIER);
+        if(unifConfs.size() != 0) {
+          setAttributes(unifConfs, om.getUnifierMeta().getAttributes());
+        }
       }
       ow.populateAggregatorMeta();
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/9b78c67b/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanConfigurationTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanConfigurationTest.java b/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanConfigurationTest.java
index 077e3a9..9b2003b 100644
--- a/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanConfigurationTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanConfigurationTest.java
@@ -525,6 +525,45 @@ public class LogicalPlanConfigurationTest {
   }
 
   @Test
+  @SuppressWarnings( {"UnnecessaryBoxing", "AssertEqualsBetweenInconvertibleTypes"})
+  public void testUnifierLevelAttributes() {
+    String appName = "app1";
+    final GenericTestOperator operator1 = new GenericTestOperator();
+    final GenericTestOperator operator2 = new GenericTestOperator();
+    StreamingApplication app = new StreamingApplication() {
+      @Override
+      public void populateDAG(DAG dag, Configuration conf)
+      {
+        dag.addOperator("operator1", operator1);
+        dag.addOperator("operator2", operator2);
+        dag.addStream("s1", operator1.outport1, operator2.inport1);
+      }
+    };
+
+    Properties props = new Properties();
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".class", app.getClass().getName());
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator1.outputport.outport1.unifier." + OperatorContext.APPLICATION_WINDOW_COUNT.getName(), "2");
+    props.put(StreamingApplication.DT_PREFIX + "application." + appName + ".operator.operator1.outputport.outport1.unifier." + OperatorContext.MEMORY_MB.getName(), "512");
+    LogicalPlanConfiguration dagBuilder = new LogicalPlanConfiguration(new Configuration(false));
+    dagBuilder.addFromProperties(props, null);
+
+    String appPath = app.getClass().getName().replace(".", "/") + ".class";
+
+    LogicalPlan dag = new LogicalPlan();
+    dagBuilder.prepareDAG(dag, app, appPath);
+
+    OperatorMeta om = null;
+    for (Map.Entry<OutputPortMeta, StreamMeta> entry : dag.getOperatorMeta("operator1").getOutputStreams().entrySet()) {
+      if(entry.getKey().getPortName().equals("outport1")) {
+        om = entry.getKey().getUnifierMeta();
+      }
+    }
+    Assert.assertNotNull(om);
+    Assert.assertEquals("", Integer.valueOf(2), om.getValue(OperatorContext.APPLICATION_WINDOW_COUNT));
+    Assert.assertEquals("", Integer.valueOf(512), om.getValue(OperatorContext.MEMORY_MB));
+  }
+
+  @Test
   public void testOperatorLevelProperties() {
     String appName = "app1";
     final GenericTestOperator operator1 = new GenericTestOperator();
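
Read together with the parser change, the test shows how a unifier attribute is addressed through the output port that owns it: application, operator, output port, unifier, then the attribute name. A condensed sketch of the key construction, using the same API calls as the test above (the class name is hypothetical):

import java.util.Properties;

import com.datatorrent.api.Context.OperatorContext;
import com.datatorrent.api.StreamingApplication;

public class UnifierAttributeKey
{
  public static void main(String[] args)
  {
    Properties props = new Properties();
    // dt.application.<app>.operator.<operator>.outputport.<port>.unifier.<attribute>
    props.put(StreamingApplication.DT_PREFIX + "application.app1.operator.operator1.outputport.outport1.unifier."
        + OperatorContext.MEMORY_MB.getName(), "512");
    props.list(System.out);
  }
}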


[19/50] [abbrv] incubator-apex-core git commit: Add tag @since 3.1.0

Posted by vr...@apache.org.
Add tag @since 3.1.0


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/8aae5733
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/8aae5733
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/8aae5733

Branch: refs/heads/feature-module
Commit: 8aae573318b74948478a1dd29d832d4daa1a9080
Parents: 760039e
Author: Aniruddha Thombre <an...@aniruddhas.com>
Authored: Mon Sep 14 17:22:48 2015 +0530
Committer: Aniruddha Thombre <an...@aniruddhas.com>
Committed: Mon Sep 14 17:22:48 2015 +0530

----------------------------------------------------------------------
 .../java/com/datatorrent/common/util/AsyncFSStorageAgent.java   | 5 +++++
 1 file changed, 5 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8aae5733/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java b/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
index cc8da25..374917a 100644
--- a/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
+++ b/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
@@ -13,6 +13,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+/**
+ * @since 3.1.0
+ */
+
 package com.datatorrent.common.util;
 
 import java.io.*;


[16/50] [abbrv] incubator-apex-core git commit: APEX-115 #resolve

Posted by vr...@apache.org.
APEX-115 #resolve


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/c3490900
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/c3490900
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/c3490900

Branch: refs/heads/feature-module
Commit: c349090049c14564ce9468c0c4ddae2c54d042f7
Parents: 4023ce0
Author: Gaurav <ga...@datatorrent.com>
Authored: Fri Sep 11 13:05:17 2015 -0700
Committer: Gaurav <ga...@datatorrent.com>
Committed: Fri Sep 11 13:05:17 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/StreamingAppMasterService.java     | 3 ++-
 .../java/com/datatorrent/stram/engine/StreamingContainer.java     | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/c3490900/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java b/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
index 98c78de..2c8e646 100644
--- a/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
+++ b/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
@@ -28,6 +28,7 @@ import javax.xml.bind.annotation.XmlElement;
 
 import com.google.common.collect.Maps;
 
+import org.apache.commons.io.FileUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -709,7 +710,7 @@ public class StreamingAppMasterService extends CompositeService
 
       if (UserGroupInformation.isSecurityEnabled() && System.currentTimeMillis() >= expiryTime && hdfsKeyTabFile != null) {
         String applicationId = appAttemptID.getApplicationId().toString();
-        expiryTime = StramUserLogin.refreshTokens(tokenLifeTime, "." + File.separator + "tmp", applicationId, conf, hdfsKeyTabFile, credentials, rmAddress, true);
+        expiryTime = StramUserLogin.refreshTokens(tokenLifeTime, FileUtils.getTempDirectoryPath(), applicationId, conf, hdfsKeyTabFile, credentials, rmAddress, true);
       }
 
       Runnable r;

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/c3490900/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java b/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
index 2705093..5c740be 100644
--- a/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
+++ b/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
@@ -34,6 +34,7 @@ import java.util.concurrent.CountDownLatch;
 import net.engio.mbassy.bus.MBassador;
 import net.engio.mbassy.bus.config.BusConfiguration;
 
+import org.apache.commons.io.FileUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -599,7 +600,7 @@ public class StreamingContainer extends YarnContainerMain
     while (!exitHeartbeatLoop) {
 
       if (UserGroupInformation.isSecurityEnabled() && System.currentTimeMillis() >= expiryTime && hdfsKeyTabFile != null) {
-        expiryTime = StramUserLogin.refreshTokens(tokenLifeTime, "." + File.separator + "tmp", containerId, conf, hdfsKeyTabFile, credentials, null, false);
+        expiryTime = StramUserLogin.refreshTokens(tokenLifeTime, FileUtils.getTempDirectoryPath(), containerId, conf, hdfsKeyTabFile, credentials, null, false);
       }
       synchronized (this.heartbeatTrigger) {
         try {
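
Editor's note: the temp-directory change above can be illustrated with a small standalone sketch (the class name is illustrative). The old value is a path relative to the process working directory, while FileUtils.getTempDirectoryPath() resolves the JVM's java.io.tmpdir.

import java.io.File;

import org.apache.commons.io.FileUtils;

public class TempDirSketch
{
  public static void main(String[] args)
  {
    // Old pattern: "./tmp", relative to wherever the JVM happens to be running.
    String relativeTmp = "." + File.separator + "tmp";

    // New pattern: the JVM temporary directory (java.io.tmpdir) resolved by commons-io.
    String systemTmp = FileUtils.getTempDirectoryPath();

    System.out.println(relativeTmp + " vs " + systemTmp);
  }
}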


[48/50] [abbrv] incubator-apex-core git commit: SPOI-6333 Use numTuples instead of constant

Posted by vr...@apache.org.
SPOI-6333 Use numTuples instead of constant


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/bb110946
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/bb110946
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/bb110946

Branch: refs/heads/feature-module
Commit: bb11094631287919476a130d61957a6f046f6fdc
Parents: e482804
Author: Munagala V. Ramanath <ra...@apache.org>
Authored: Mon Sep 21 10:02:31 2015 -0700
Committer: Munagala V. Ramanath <ra...@apache.org>
Committed: Mon Sep 21 10:02:31 2015 -0700

----------------------------------------------------------------------
 .../main/java/__packageInPathFormat__/RandomNumberGenerator.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/bb110946/apex-app-archetype/src/main/resources/archetype-resources/src/main/java/__packageInPathFormat__/RandomNumberGenerator.java
----------------------------------------------------------------------
diff --git a/apex-app-archetype/src/main/resources/archetype-resources/src/main/java/__packageInPathFormat__/RandomNumberGenerator.java b/apex-app-archetype/src/main/resources/archetype-resources/src/main/java/__packageInPathFormat__/RandomNumberGenerator.java
index 26d1cb8..24f4670 100644
--- a/apex-app-archetype/src/main/resources/archetype-resources/src/main/java/__packageInPathFormat__/RandomNumberGenerator.java
+++ b/apex-app-archetype/src/main/resources/archetype-resources/src/main/java/__packageInPathFormat__/RandomNumberGenerator.java
@@ -29,7 +29,7 @@ public class RandomNumberGenerator extends BaseOperator implements InputOperator
   @Override
   public void emitTuples()
   {
-    if (count++ < 100) {
+    if (count++ < numTuples) {
       out.emit(Math.random());
     }
   }
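
Editor's note: a self-contained sketch of the corrected operator shape is below. Package names follow apex-core 3.x; the default value, accessor names, and class name are assumptions for illustration, not taken from the archetype source.

import com.datatorrent.api.DefaultOutputPort;
import com.datatorrent.api.InputOperator;
import com.datatorrent.common.util.BaseOperator;

public class BoundedRandomGenerator extends BaseOperator implements InputOperator
{
  public final transient DefaultOutputPort<Double> out = new DefaultOutputPort<Double>();

  private int numTuples = 1000;  // assumed default; configurable via operator properties
  private transient int count;   // tuples emitted so far

  @Override
  public void emitTuples()
  {
    // Bound emission by the configurable property instead of a hard-coded constant.
    if (count++ < numTuples) {
      out.emit(Math.random());
    }
  }

  public int getNumTuples()
  {
    return numTuples;
  }

  public void setNumTuples(int numTuples)
  {
    this.numTuples = numTuples;
  }
}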


[24/50] [abbrv] incubator-apex-core git commit: Merge branch 'APEX-117' of https://github.com/chandnisingh/incubator-apex-core into devel-3

Posted by vr...@apache.org.
Merge branch 'APEX-117' of https://github.com/chandnisingh/incubator-apex-core into devel-3


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/5e1d5411
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/5e1d5411
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/5e1d5411

Branch: refs/heads/feature-module
Commit: 5e1d541154639372c62143e8f990762183721d14
Parents: efaa8f2 77e693c
Author: David Yan <da...@datatorrent.com>
Authored: Mon Sep 14 12:50:00 2015 -0700
Committer: David Yan <da...@datatorrent.com>
Committed: Mon Sep 14 12:50:00 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/StreamingContainerManager.java    | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------



[46/50] [abbrv] incubator-apex-core git commit: Fix APEX-141: Fix ClientHandler Not Found exception in Hadoop 2.2

Posted by vr...@apache.org.
Fix APEX-141: Fix ClientHandler Not Found exception in Hadoop 2.2


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/282c43b2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/282c43b2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/282c43b2

Branch: refs/heads/feature-module
Commit: 282c43b2e6c031700bb0754e36b2680f1846bf63
Parents: e482804
Author: bright <br...@bright-mac.local>
Authored: Fri Sep 18 16:46:40 2015 -0700
Committer: bright <br...@bright-mac.local>
Committed: Fri Sep 18 16:46:40 2015 -0700

----------------------------------------------------------------------
 engine/src/main/java/com/datatorrent/stram/StramClient.java | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/282c43b2/engine/src/main/java/com/datatorrent/stram/StramClient.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StramClient.java b/engine/src/main/java/com/datatorrent/stram/StramClient.java
index 700002a..8a6230a 100644
--- a/engine/src/main/java/com/datatorrent/stram/StramClient.java
+++ b/engine/src/main/java/com/datatorrent/stram/StramClient.java
@@ -121,6 +121,7 @@ public class StramClient
       org.mozilla.javascript.Scriptable.class,
       // The jersey client inclusion is only for Hadoop-2.2 and should be removed when we upgrade our Hadoop
       // dependency version since Hadoop-2.3 onwards has jersey client bundled
+      com.sun.jersey.api.client.ClientHandler.class,
       com.sun.jersey.client.apache4.ApacheHttpClient4Handler.class
   };
 
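Editor's note: a hedged sketch of what such a marker-class list is for (field and class names here are illustrative, and the localization purpose is inferred from the inline comment, not from StramClient itself): each listed class resolves to the jar that contains it, and ClientHandler lives in jersey-client, which Hadoop 2.2 does not bundle.

import com.sun.jersey.api.client.ClientHandler;
import com.sun.jersey.client.apache4.ApacheHttpClient4Handler;

public class JerseyJarLocator
{
  // Marker classes whose containing jars must be available to the application.
  static final Class<?>[] MARKERS = {
      ClientHandler.class,
      ApacheHttpClient4Handler.class
  };

  public static void main(String[] args)
  {
    for (Class<?> clazz : MARKERS) {
      // Resolve the jar (or class directory) each marker class was loaded from.
      String location = clazz.getProtectionDomain().getCodeSource().getLocation().toString();
      System.out.println(clazz.getName() + " -> " + location);
    }
  }
}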


[12/50] [abbrv] incubator-apex-core git commit: APEX-112 #resolve null values should not be converted to the string "null", and added null check on the beanutils converters

Posted by vr...@apache.org.
APEX-112 #resolve null values should not be converted to the string "null", and added null check on the beanutils converters


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/ac25fbaa
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/ac25fbaa
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/ac25fbaa

Branch: refs/heads/feature-module
Commit: ac25fbaa63844e6760a90711a956402388cd22fa
Parents: 6c24259
Author: David Yan <da...@datatorrent.com>
Authored: Thu Sep 10 16:27:46 2015 -0700
Committer: David Yan <da...@datatorrent.com>
Committed: Thu Sep 10 16:49:52 2015 -0700

----------------------------------------------------------------------
 .../src/main/java/com/datatorrent/stram/StringCodecs.java   | 9 ++++++---
 .../java/com/datatorrent/stram/webapp/StramWebServices.java | 4 ++--
 2 files changed, 8 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ac25fbaa/engine/src/main/java/com/datatorrent/stram/StringCodecs.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StringCodecs.java b/engine/src/main/java/com/datatorrent/stram/StringCodecs.java
index eb31c3d..0d5e359 100644
--- a/engine/src/main/java/com/datatorrent/stram/StringCodecs.java
+++ b/engine/src/main/java/com/datatorrent/stram/StringCodecs.java
@@ -54,6 +54,9 @@ public class StringCodecs
       @SuppressWarnings("unchecked")
       public Object convert(Class type, Object value)
       {
+        if (value == null) {
+          return null;
+        }
         for (Class<?> clazz = value.getClass(); clazz != null; clazz = clazz.getSuperclass()) {
           Class<? extends StringCodec> codec = codecs.get(clazz);
           if (codec == null) {
@@ -84,7 +87,7 @@ public class StringCodecs
       @Override
       public Object convert(Class type, Object value)
       {
-        return URI.create(value.toString());
+        return value == null ? null : URI.create(value.toString());
       }
     }, URI.class);
   }
@@ -124,7 +127,7 @@ public class StringCodecs
             @Override
             public Object convert(Class type, Object value)
             {
-              return codecInstance.fromString(value.toString());
+              return value == null ? null : codecInstance.fromString(value.toString());
             }
 
           }, entry.getKey());
@@ -150,7 +153,7 @@ public class StringCodecs
       @Override
       public Object convert(Class type, Object value)
       {
-        return codecInstance.fromString(value.toString());
+        return value == null ? null : codecInstance.fromString(value.toString());
       }
 
     }, clazz);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ac25fbaa/engine/src/main/java/com/datatorrent/stram/webapp/StramWebServices.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/webapp/StramWebServices.java b/engine/src/main/java/com/datatorrent/stram/webapp/StramWebServices.java
index 97edf39..117681e 100644
--- a/engine/src/main/java/com/datatorrent/stram/webapp/StramWebServices.java
+++ b/engine/src/main/java/com/datatorrent/stram/webapp/StramWebServices.java
@@ -554,7 +554,7 @@ public class StramWebServices
       Iterator<String> keys = request.keys();
       while (keys.hasNext()) {
         String key = keys.next();
-        String val = request.getString(key);
+        String val = request.isNull(key) ? null : request.getString(key);
         LOG.debug("Setting property for {}: {}={}", operatorName, key, val);
         dagManager.setOperatorProperty(operatorName, key, val);
       }
@@ -582,7 +582,7 @@ public class StramWebServices
       Iterator<String> keys = request.keys();
       while (keys.hasNext()) {
         String key = keys.next();
-        String val = request.getString(key);
+        String val = request.isNull(key) ? null : request.getString(key);
         dagManager.setPhysicalOperatorProperty(operatorId, key, val);
       }
     }
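
Editor's note: both halves of the change follow the same null-guard idea: the web service no longer turns a JSON null into the literal string "null", and the beanutils converters pass null through instead of dereferencing it. A minimal registration sketch (the class name is illustrative):

import java.net.URI;

import org.apache.commons.beanutils.ConvertUtils;
import org.apache.commons.beanutils.Converter;

public class NullSafeUriConverter
{
  public static void register()
  {
    ConvertUtils.register(new Converter()
    {
      @Override
      @SuppressWarnings("unchecked")
      public Object convert(Class type, Object value)
      {
        // Pass null through instead of calling toString() on it.
        return value == null ? null : URI.create(value.toString());
      }
    }, URI.class);
  }
}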


[30/50] [abbrv] incubator-apex-core git commit: Merge branch 'gaurav-IdleTimeHandler' into devel-3

Posted by vr...@apache.org.
Merge branch 'gaurav-IdleTimeHandler' into devel-3


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/0936445b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/0936445b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/0936445b

Branch: refs/heads/feature-module
Commit: 0936445bd06f68abd899eff2cac5f2cbb01cbaf7
Parents: c0baa9d 7801b7a
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Tue Sep 15 11:54:45 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Tue Sep 15 11:54:45 2015 -0700

----------------------------------------------------------------------
 engine/src/main/java/com/datatorrent/stram/engine/GenericNode.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------



[05/50] [abbrv] incubator-apex-core git commit: APEX-22 #resolve #comment adding the port object only when it doesn't already exist

Posted by vr...@apache.org.
APEX-22 #resolve #comment adding the port object only when it doesn't already exist


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/39d5d31e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/39d5d31e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/39d5d31e

Branch: refs/heads/feature-module
Commit: 39d5d31e50c9e4fda863d0b0f620fc02f6f8382d
Parents: 09f716e
Author: Chandni Singh <cs...@apache.org>
Authored: Wed Sep 9 16:37:32 2015 -0700
Committer: Chandni Singh <cs...@apache.org>
Committed: Wed Sep 9 16:46:36 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/plan/logical/Operators.java   | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/39d5d31e/engine/src/main/java/com/datatorrent/stram/plan/logical/Operators.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/logical/Operators.java b/engine/src/main/java/com/datatorrent/stram/plan/logical/Operators.java
index 9e08cba..57742a7 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/logical/Operators.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/logical/Operators.java
@@ -63,13 +63,17 @@ public abstract class Operators
     @Override
     public void addInputPort(Operator.InputPort<?> port, Field field, InputPortFieldAnnotation portAnnotation, AppData.QueryPort adqAnnotation)
     {
-      inputPorts.put(field.getName(), new PortContextPair<InputPort<?>>(port));
+      if (!inputPorts.containsKey(field.getName())) {
+        inputPorts.put(field.getName(), new PortContextPair<InputPort<?>>(port));
+      }
     }
 
     @Override
     public void addOutputPort(Operator.OutputPort<?> port, Field field, OutputPortFieldAnnotation portAnnotation, AppData.ResultPort adrAnnotation)
     {
-      outputPorts.put(field.getName(), new PortContextPair<OutputPort<?>>(port));
+      if (!outputPorts.containsKey(field.getName())) {
+        outputPorts.put(field.getName(), new PortContextPair<OutputPort<?>>(port));
+      }
     }
   };
 
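Editor's note: the guard above amounts to a first-registration-wins map. A generic standalone sketch (names are illustrative, not the actual port mapping descriptor):

import java.util.HashMap;
import java.util.Map;

public class FirstWinsRegistry<P>
{
  private final Map<String, P> portsByField = new HashMap<String, P>();

  public void add(String fieldName, P port)
  {
    // Only register when no entry exists yet, so an earlier registration is never overwritten.
    if (!portsByField.containsKey(fieldName)) {
      portsByField.put(fieldName, port);
    }
  }

  public P get(String fieldName)
  {
    return portsByField.get(fieldName);
  }
}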


[03/50] [abbrv] incubator-apex-core git commit: APEX-93 #resolve #comment Fixing dynamic partitioning issue with persist stream Added flow to redeploy persist operators as well when sink operators are dynamically repartitioned Modified dynamic repartitio

Posted by vr...@apache.org.
APEX-93 #resolve #comment Fixing dynamic partitioning issue with persist stream
Added flow to redeploy persist operators as well when sink operators are dynamically repartitioned
Modified dynamic repartitioning test case to validate that persist operator is part of the dependent operators redeployed after partitioning


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/3178f13f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/3178f13f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/3178f13f

Branch: refs/heads/feature-module
Commit: 3178f13f49695aa4f6910006ecd4efbca8dad6a9
Parents: 55a068f
Author: ishark <is...@datatorrent.com>
Authored: Thu Sep 3 19:02:02 2015 -0700
Committer: ishark <is...@datatorrent.com>
Committed: Wed Sep 9 16:02:43 2015 -0700

----------------------------------------------------------------------
 .../StreamCodecWrapperForPersistance.java       |  2 +-
 .../stram/plan/physical/PhysicalPlan.java       | 28 +++++++++++++++++++-
 .../stram/plan/StreamPersistanceTests.java      | 13 +++++++++
 3 files changed, 41 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3178f13f/engine/src/main/java/com/datatorrent/stram/plan/logical/StreamCodecWrapperForPersistance.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/logical/StreamCodecWrapperForPersistance.java b/engine/src/main/java/com/datatorrent/stram/plan/logical/StreamCodecWrapperForPersistance.java
index 97fd75f..81be56a 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/logical/StreamCodecWrapperForPersistance.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/logical/StreamCodecWrapperForPersistance.java
@@ -52,7 +52,7 @@ public class StreamCodecWrapperForPersistance<T> implements StreamCodec<T>, Seri
       Collection<PartitionKeys> partitionKeysList = entry.getValue();
 
       for (PartitionKeys keys : partitionKeysList) {
-        if (keys.partitions.contains(keys.mask & codec.getPartition(o))) {
+        if ( keys.partitions != null && keys.partitions.contains(keys.mask & codec.getPartition(o))) {
           // Then at least one of the partitions is getting this event
           // So send the event to persist operator
           return true;

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3178f13f/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java b/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
index 2176035..fb429a9 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
@@ -420,7 +420,7 @@ public class PhysicalPlan implements Serializable
     Collection<PTOperator> ptOperators = getOperators(sinkPortMeta.getOperatorWrapper());
     Collection<PartitionKeys> partitionKeysList = new ArrayList<PartitionKeys>();
     for (PTOperator p : ptOperators) {
-      PartitionKeys keys = (PartitionKeys) p.getPartitionKeys().get(sinkPortMeta.getPortObject());
+      PartitionKeys keys = p.partitionKeys.get(sinkPortMeta);
       partitionKeysList.add(keys);
     }
 
@@ -1390,9 +1390,35 @@ public class PhysicalPlan implements Serializable
         getDeps(operator, visited);
       }
     }
+    visited.addAll(getDependentPersistOperators(operators));
     return visited;
   }
 
+  private Set<PTOperator> getDependentPersistOperators(Collection<PTOperator> operators)
+  {
+    Set<PTOperator> persistOperators = new LinkedHashSet<PTOperator>();
+    if (operators != null) {
+      for (PTOperator operator : operators) {
+        for (PTInput in : operator.inputs) {
+          if (in.logicalStream.getPersistOperator() != null) {
+            for (InputPortMeta inputPort : in.logicalStream.getSinksToPersist()) {
+              if (inputPort.getOperatorWrapper().equals(operator.operatorMeta)) {
+                // Redeploy the stream wide persist operator only if the current sink is being persisted
+                persistOperators.addAll(getOperators(in.logicalStream.getPersistOperator()));
+                break;
+              }
+            }
+          }
+          for (Entry<InputPortMeta, OperatorMeta> entry : in.logicalStream.sinkSpecificPersistOperatorMap.entrySet()) {
+            // Redeploy sink specific persist operators
+            persistOperators.addAll(getOperators(entry.getValue()));
+          }
+        }
+      }
+    }
+    return persistOperators;
+  }
+
   /**
    * Add logical operator to the plan. Assumes that upstream operators have been added before.
    * @param om

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3178f13f/engine/src/test/java/com/datatorrent/stram/plan/StreamPersistanceTests.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/plan/StreamPersistanceTests.java b/engine/src/test/java/com/datatorrent/stram/plan/StreamPersistanceTests.java
index c82f3a9..1cd4311 100644
--- a/engine/src/test/java/com/datatorrent/stram/plan/StreamPersistanceTests.java
+++ b/engine/src/test/java/com/datatorrent/stram/plan/StreamPersistanceTests.java
@@ -1,12 +1,14 @@
 package com.datatorrent.stram.plan;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.Map.Entry;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -956,12 +958,23 @@ public class StreamPersistanceTests
 
     List<PTOperator> ptos = plan.getOperators(passThruMeta);
 
+    PTOperator persistOperatorContainer = null;
+
     for (PTContainer container : plan.getContainers()) {
       for (PTOperator operator : container.getOperators()) {
         operator.setState(PTOperator.State.ACTIVE);
+        if (operator.getName().equals("persister")) {
+          persistOperatorContainer = operator;
+        }
       }
     }
 
+    // Check that persist operator is part of dependents redeployed
+    Set<PTOperator> operators = plan.getDependents(ptos);
+    logger.debug("Operators to be re-deployed = {}", operators);
+    // Validate that persist operator is part of dependents
+    assertTrue("persist operator should be part of the operators to be redeployed", operators.contains(persistOperatorContainer));
+
     LogicalPlan.StreamMeta s1 = (LogicalPlan.StreamMeta) s;
     StreamCodec codec = s1.getPersistOperatorInputPort().getValue(PortContext.STREAM_CODEC);
 
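Editor's note: the routing test guarded in StreamCodecWrapperForPersistance can be shown with simplified types (the class below is a sketch, not the wrapper itself): a tuple goes to the persist operator only if at least one sink partition would receive it, and partitions without assigned keys are now skipped instead of triggering a NullPointerException.

import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

public class PersistRoutingSketch
{
  static final class PartitionKeys
  {
    final int mask;
    final Set<Integer> partitions;

    PartitionKeys(int mask, Set<Integer> partitions)
    {
      this.mask = mask;
      this.partitions = partitions;
    }
  }

  static boolean shouldPersist(int tuplePartition, Collection<PartitionKeys> partitionKeysList)
  {
    for (PartitionKeys keys : partitionKeysList) {
      // Partitions without assigned keys are skipped rather than dereferenced.
      if (keys.partitions != null && keys.partitions.contains(keys.mask & tuplePartition)) {
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args)
  {
    PartitionKeys unassigned = new PartitionKeys(0x01, null);
    PartitionKeys partitionZero = new PartitionKeys(0x01, new HashSet<Integer>(Arrays.asList(0)));
    // 2 & 0x01 == 0, which partitionZero owns, so the tuple is persisted.
    System.out.println(shouldPersist(2, Arrays.asList(unassigned, partitionZero)));
  }
}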


[14/50] [abbrv] incubator-apex-core git commit: Merge branch 'APEX-68' of github.com:vrozov/incubator-apex-core into vlad-apex-68

Posted by vr...@apache.org.
Merge branch 'APEX-68' of github.com:vrozov/incubator-apex-core into vlad-apex-68


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/928d3680
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/928d3680
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/928d3680

Branch: refs/heads/feature-module
Commit: 928d36804b695e35e8a59cc8acf3aabc23b296a9
Parents: 97cbef6 1b8aecf
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Thu Sep 10 17:58:24 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Thu Sep 10 17:58:24 2015 -0700

----------------------------------------------------------------------
 .../bufferserver/internal/DataList.java         | 598 +++++++++++--------
 .../bufferserver/internal/FastDataList.java     |  64 +-
 .../datatorrent/bufferserver/server/Server.java | 101 +++-
 3 files changed, 425 insertions(+), 338 deletions(-)
----------------------------------------------------------------------



[07/50] [abbrv] incubator-apex-core git commit: APEX-100 #resolve make embeddedwebsocketserver use an automatically selected port instead of hardcoded port

Posted by vr...@apache.org.
APEX-100 #resolve make embeddedwebsocketserver use an automatically selected port instead of hardcoded port


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/7888aa24
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/7888aa24
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/7888aa24

Branch: refs/heads/feature-module
Commit: 7888aa244b71f121f064800c64a2e823dff35f34
Parents: 065ddbe
Author: David Yan <da...@datatorrent.com>
Authored: Wed Sep 9 17:18:22 2015 -0700
Committer: David Yan <da...@datatorrent.com>
Committed: Wed Sep 9 17:18:22 2015 -0700

----------------------------------------------------------------------
 .../datatorrent/stram/StreamingContainerManagerTest.java  |  4 ++--
 .../com/datatorrent/stram/support/StramTestSupport.java   | 10 +++++++++-
 2 files changed, 11 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/7888aa24/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java b/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
index 2656e8d..daa9e13 100644
--- a/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
@@ -901,10 +901,9 @@ public class StreamingContainerManagerTest
   @Test
   public void testAppDataPush() throws Exception
   {
-    int port = 12345;
     final String topic = "xyz";
     final List<JSONObject> messages = new ArrayList<JSONObject>();
-    EmbeddedWebSocketServer server = new EmbeddedWebSocketServer(port);
+    EmbeddedWebSocketServer server = new EmbeddedWebSocketServer(0);
     server.setWebSocket(new WebSocket.OnTextMessage()
     {
 
@@ -930,6 +929,7 @@ public class StreamingContainerManagerTest
     });
     try {
       server.start();
+      int port = server.getPort();
       LogicalPlan dag = new LogicalPlan();
       dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testMeta.dir, null));
       TestGeneratorInputOperator o1 = dag.addOperator("o1", TestGeneratorInputOperator.class);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/7888aa24/engine/src/test/java/com/datatorrent/stram/support/StramTestSupport.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/support/StramTestSupport.java b/engine/src/test/java/com/datatorrent/stram/support/StramTestSupport.java
index 71a402e..efd44ba 100644
--- a/engine/src/test/java/com/datatorrent/stram/support/StramTestSupport.java
+++ b/engine/src/test/java/com/datatorrent/stram/support/StramTestSupport.java
@@ -533,7 +533,7 @@ abstract public class StramTestSupport
 
     private final Logger LOG = LoggerFactory.getLogger(EmbeddedWebSocketServer.class);
 
-    private final int port;
+    private int port;
     private Server server;
     private WebSocket websocket;
 
@@ -570,6 +570,14 @@ abstract public class StramTestSupport
 
       contextHandler.addServlet(new ServletHolder(webSocketServlet), "/pubsub");
       server.start();
+      if (port == 0) {
+        port = server.getConnectors()[0].getLocalPort();
+      }
+    }
+
+    public int getPort()
+    {
+      return port;
     }
 
     public void stop() throws Exception

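Editor's note: the port-selection pattern used above, reduced to a standalone sketch (Jetty 8 style API matching the getConnectors()[0].getLocalPort() call; the exact Jetty package in use is an assumption here): bind to port 0 and ask the started server which port the OS picked.

import org.eclipse.jetty.server.Server;

public class EphemeralPortSketch
{
  public static void main(String[] args) throws Exception
  {
    Server server = new Server(0);  // port 0 asks the OS for any free port
    server.start();
    int port = server.getConnectors()[0].getLocalPort();  // the port actually bound
    System.out.println("listening on " + port);
    server.stop();
  }
}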

[41/50] [abbrv] incubator-apex-core git commit: Merge branch 'APEX-28_pull_latest' into devel-3

Posted by vr...@apache.org.
Merge branch 'APEX-28_pull_latest' into devel-3


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/454fecca
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/454fecca
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/454fecca

Branch: refs/heads/feature-module
Commit: 454feccacd280996624db339e6270bb0f20ebccc
Parents: 7503dde 977093e
Author: Chandni Singh <cs...@apache.org>
Authored: Wed Sep 16 21:52:27 2015 -0700
Committer: Chandni Singh <cs...@apache.org>
Committed: Wed Sep 16 21:52:27 2015 -0700

----------------------------------------------------------------------
 .../stram/plan/logical/LogicalPlan.java         |   10 +-
 .../plan/logical/LogicalPlanConfiguration.java  | 1305 ++++++++++++---
 .../plan/LogicalPlanConfigurationTest.java      |  876 ----------
 .../datatorrent/stram/plan/LogicalPlanTest.java |  988 ------------
 .../logical/LogicalPlanConfigurationTest.java   | 1511 ++++++++++++++++++
 .../stram/plan/logical/LogicalPlanTest.java     |  988 ++++++++++++
 .../stram/plan/logical/MockStorageAgent.java    |   67 +
 .../src/test/resources/schemaTestTopology.json  |    2 +-
 engine/src/test/resources/testTopology.json     |    4 +-
 9 files changed, 3639 insertions(+), 2112 deletions(-)
----------------------------------------------------------------------



[02/50] [abbrv] incubator-apex-core git commit: APEX-68: Buffer server should use a separate thread to spool blocks to disk

Posted by vr...@apache.org.
APEX-68: Buffer server should use a separate thread to spool blocks to disk


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/1b8aecf3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/1b8aecf3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/1b8aecf3

Branch: refs/heads/feature-module
Commit: 1b8aecf3b429069b92a790a4aae2e41490934de7
Parents: f2a4071
Author: Vlad Rozov <v....@datatorrent.com>
Authored: Wed Sep 9 15:41:30 2015 -0700
Committer: Vlad Rozov <v....@datatorrent.com>
Committed: Wed Sep 9 15:41:30 2015 -0700

----------------------------------------------------------------------
 .../bufferserver/internal/DataList.java         | 598 +++++++++++--------
 .../bufferserver/internal/FastDataList.java     |  64 +-
 .../datatorrent/bufferserver/server/Server.java | 101 +++-
 3 files changed, 425 insertions(+), 338 deletions(-)
----------------------------------------------------------------------
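
Editor's note: a heavily simplified sketch of the pattern this change introduces in DataList (all names below are illustrative, not the actual API): blocks are spooled on a dedicated storage executor instead of the thread that filled them, and a counter of in-memory block permits governs when reading clients are suspended and resumed.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;

public class SpoolingSketch
{
  private final ExecutorService storageExecutor = Executors.newSingleThreadExecutor();
  private final AtomicInteger inMemBlockPermits;

  public SpoolingSketch(int maxInMemBlocks)
  {
    this.inMemBlockPermits = new AtomicInteger(maxInMemBlocks);
  }

  // Called when a block fills up: the blocking disk write runs on the storage executor,
  // never on the network thread that produced the block.
  public void onBlockFull(final byte[] block)
  {
    final int permits = inMemBlockPermits.decrementAndGet();
    if (permits <= 0) {
      System.out.println("low on in-memory block permits; a real implementation suspends readers here");
    }
    storageExecutor.submit(new Runnable()
    {
      @Override
      public void run()
      {
        spoolToDisk(block);                   // secondary-storage write
        inMemBlockPermits.incrementAndGet();  // block no longer pinned in memory
        // ...resume any suspended readers now that a permit is available again.
      }
    });
  }

  private void spoolToDisk(byte[] block)
  {
    // Placeholder for the actual storage write.
  }
}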


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/1b8aecf3/bufferserver/src/main/java/com/datatorrent/bufferserver/internal/DataList.java
----------------------------------------------------------------------
diff --git a/bufferserver/src/main/java/com/datatorrent/bufferserver/internal/DataList.java b/bufferserver/src/main/java/com/datatorrent/bufferserver/internal/DataList.java
index baa052a..6806168 100644
--- a/bufferserver/src/main/java/com/datatorrent/bufferserver/internal/DataList.java
+++ b/bufferserver/src/main/java/com/datatorrent/bufferserver/internal/DataList.java
@@ -19,6 +19,8 @@ import java.io.IOException;
 import java.util.*;
 import java.util.Map.Entry;
 import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -32,8 +34,13 @@ import com.datatorrent.bufferserver.util.BitVector;
 import com.datatorrent.bufferserver.util.Codec;
 import com.datatorrent.bufferserver.util.SerializedData;
 import com.datatorrent.bufferserver.util.VarInt;
+import com.datatorrent.netlet.AbstractClient;
 import com.datatorrent.netlet.util.VarInt.MutableInt;
 
+import static com.google.common.collect.Lists.newArrayList;
+import static com.google.common.collect.Maps.newHashMap;
+import static com.google.common.collect.Sets.newHashSet;
+
 /**
  * Maintains list of data and manages addition and deletion of the data<p>
  * <br>
@@ -44,83 +51,137 @@ public class DataList
 {
   private final int MAX_COUNT_OF_INMEM_BLOCKS;
   protected final String identifier;
-  private final Integer blocksize;
-  private HashMap<BitVector, HashSet<DataListener>> listeners = new HashMap<BitVector, HashSet<DataListener>>();
-  protected HashSet<DataListener> all_listeners = new HashSet<DataListener>();
+  private final int blockSize;
+  private final HashMap<BitVector, HashSet<DataListener>> listeners = newHashMap();
+  protected final HashSet<DataListener> all_listeners = newHashSet();
+  protected final HashMap<String, DataListIterator> iterators = newHashMap();
   protected Block first;
   protected Block last;
   protected Storage storage;
-  protected ExecutorService autoflushExecutor;
+  protected ExecutorService autoFlushExecutor;
   protected ExecutorService storageExecutor;
+  protected int size;
+  protected int processingOffset;
+  protected long baseSeconds;
+  private final List<AbstractClient> suspendedClients = newArrayList();
+  private final AtomicInteger numberOfInMemBlockPermits;
+  private MutableInt nextOffset = new MutableInt();
+
+  public DataList(final String identifier, final int blockSize, final int numberOfCacheBlocks)
+  {
+    if (numberOfCacheBlocks < 1) {
+      throw new IllegalArgumentException("Invalid number of Data List Memory blocks " + numberOfCacheBlocks);
+    }
+    this.MAX_COUNT_OF_INMEM_BLOCKS = numberOfCacheBlocks;
+    numberOfInMemBlockPermits = new AtomicInteger(MAX_COUNT_OF_INMEM_BLOCKS - 1);
+    this.identifier = identifier;
+    this.blockSize = blockSize;
+    first = last = new Block(identifier, blockSize);
+  }
 
-  public int getBlockSize()
+  public DataList(String identifier)
   {
-    return blocksize;
+    /*
+     * We use 64MB (the default HDFS block getSize) as the getSize of the memory pool so we can flush the data 1 block at a time to the filesystem.
+     * we will use default value of 8 block sizes to be cached in memory
+     */
+    this(identifier, 64 * 1024 * 1024, 8);
   }
 
-  public void rewind(int baseSeconds, int windowId) throws IOException
+  public int getBlockSize()
   {
-    long longWindowId = (long)baseSeconds << 32 | windowId;
+    return blockSize;
+  }
 
-    for (Block temp = first; temp != null; temp = temp.next) {
+  public void rewind(final int baseSeconds, final int windowId) throws IOException
+  {
+    final long longWindowId = (long)baseSeconds << 32 | windowId;
+    logger.debug("Rewinding {} from window ID {} to window ID {}", this, Codec.getStringWindowId(last.ending_window), Codec.getStringWindowId(longWindowId));
 
-      if (temp.starting_window >= longWindowId || temp.ending_window > longWindowId) {
-        if (temp != last) {
-          temp.next = null;
-          last = temp;
+    int numberOfInMemBlockRewound = 0;
+    synchronized (this) {
+      for (Block temp = first; temp != null; temp = temp.next) {
+        if (temp.starting_window >= longWindowId || temp.ending_window > longWindowId) {
+          if (temp != last) {
+            last = temp;
+            do {
+              temp = temp.next;
+              temp.discard(false);
+              if (temp.data != null) {
+                temp.data = null;
+                numberOfInMemBlockRewound++;
+              }
+            } while (temp.next != null);
+            last.next = null;
+            last.acquire(true);
+          }
+          this.baseSeconds = last.rewind(longWindowId);
+          processingOffset = last.writingOffset;
+          size = 0;
+          break;
         }
-
-        this.baseSeconds = temp.rewind(longWindowId);
-        processingOffset = temp.writingOffset;
-        size = 0;
       }
     }
 
-    for (DataListIterator dli : iterators.values()) {
-      dli.rewind(processingOffset);
-    }
+    /*
+      TODO: properly rewind Data List iterators, especially handle case when iterators point to blocks past the last block.
+    */
+
+    final int numberOfInMemBlockPermits = this.numberOfInMemBlockPermits.addAndGet(numberOfInMemBlockRewound);
+    assert numberOfInMemBlockPermits < MAX_COUNT_OF_INMEM_BLOCKS : "Number of in memory block permits " + numberOfInMemBlockPermits + " exceeded configured maximum " + MAX_COUNT_OF_INMEM_BLOCKS + '.';
+    resumeSuspendedClients(numberOfInMemBlockPermits);
+    logger.debug("Discarded {} in memory blocks during rewind. Number of in memory blocks permits {} after rewinding {}. ", numberOfInMemBlockRewound, numberOfInMemBlockPermits, this);
+
   }
 
   public void reset()
   {
+    logger.debug("Resetting {}", this);
     listeners.clear();
     all_listeners.clear();
 
-    if (storage != null) {
-      while (first != null) {
-        if (first.uniqueIdentifier > 0) {
-          logger.debug("discarding {} {} in reset", identifier, first.uniqueIdentifier);
-          storage.discard(identifier, first.uniqueIdentifier);
+    synchronized (this) {
+      if (storage != null) {
+        Block temp = first;
+        while (temp != last) {
+          temp.discard(false);
+          temp.data = null;
+          temp = temp.next;
         }
-        first = first.next;
       }
+      first = last;
     }
+    numberOfInMemBlockPermits.set(MAX_COUNT_OF_INMEM_BLOCKS - 1);
   }
 
-  public void purge(int baseSeconds, int windowId)
+  public void purge(final int baseSeconds, final int windowId)
   {
-    long longWindowId = (long)baseSeconds << 32 | windowId;
-    logger.debug("purge request for windowId {}", Codec.getStringWindowId(longWindowId));
-
-    Block prev = null;
-    for (Block temp = first; temp != null && temp.starting_window <= longWindowId; temp = temp.next) {
-      if (temp.ending_window > longWindowId || temp == last) {
-        if (prev != null) {
-          first = temp;
+    final long longWindowId = (long)baseSeconds << 32 | windowId;
+    logger.debug("Purging {} from window ID {} to window ID {}", this, Codec.getStringWindowId(first.starting_window), Codec.getStringWindowId(longWindowId));
+
+    int numberOfInMemBlockPurged = 0;
+    synchronized (this) {
+      for (Block prev = null, temp = first; temp != null && temp.starting_window <= longWindowId; prev = temp, temp = temp.next) {
+        if (temp.ending_window > longWindowId || temp == last) {
+          if (prev != null) {
+            first = temp;
+          }
+          first.purge(longWindowId);
+          break;
+        }
+        temp.discard(false);
+        if (temp.data != null) {
+          temp.data = null;
+          numberOfInMemBlockPurged++;
         }
-
-        first.purge(longWindowId);
-        break;
       }
+    }
 
-      if (storage != null && temp.uniqueIdentifier > 0) {
-        logger.debug("discarding {} {} in purge", identifier, temp.uniqueIdentifier);
-
-        storage.discard(identifier, temp.uniqueIdentifier);
-      }
+    final int numberOfInMemBlockPermits = this.numberOfInMemBlockPermits.addAndGet(numberOfInMemBlockPurged);
+    assert numberOfInMemBlockPermits < MAX_COUNT_OF_INMEM_BLOCKS : "Number of in memory block permits " + numberOfInMemBlockPermits + " exceeded configured maximum " + MAX_COUNT_OF_INMEM_BLOCKS + '.';
+    resumeSuspendedClients(numberOfInMemBlockPermits);
+    logger.debug("Discarded {} in memory blocks during purge. Number of in memory blocks permits {} after purging {}. ", numberOfInMemBlockPurged, numberOfInMemBlockPermits, this);
 
-      prev = temp;
-    }
   }
 
   /**
@@ -131,35 +192,6 @@ public class DataList
     return identifier;
   }
 
-  public DataList(String identifier, int blocksize, int numberOfCacheBlocks, int refCount)
-  {
-    this(identifier, blocksize, numberOfCacheBlocks);
-    first.refCount = refCount;
-  }
-
-  public DataList(String identifier, int blocksize, int numberOfCacheBlocks)
-  {
-    this.MAX_COUNT_OF_INMEM_BLOCKS = numberOfCacheBlocks;
-    this.identifier = identifier;
-    this.blocksize = blocksize;
-    first = new Block(identifier, blocksize);
-    last = first;
-  }
-
-  public DataList(String identifier)
-  {
-    /*
-     * We use 64MB (the default HDFS block getSize) as the getSize of the memory pool so we can flush the data 1 block at a time to the filesystem.
-     * we will use default value of 8 block sizes to be cached in memory
-     */
-    this(identifier, 64 * 1024 * 1024, 8);
-  }
-
-  MutableInt nextOffset = new MutableInt();
-  long baseSeconds;
-  int size;
-  int processingOffset;
-
   public void flush(final int writeOffset)
   {
     //logger.debug("size = {}, processingOffset = {}, nextOffset = {}, writeOffset = {}", size, processingOffset, nextOffset.integer, writeOffset);
@@ -195,8 +227,7 @@ public class DataList
               last.starting_window = baseSeconds | bwt.getWindowId();
               last.ending_window = last.starting_window;
               //logger.debug("assigned both window id {}", last);
-            }
-            else {
+            } else {
               last.ending_window = baseSeconds | bwt.getWindowId();
               //logger.debug("assigned last window id {}", last);
             }
@@ -209,8 +240,7 @@ public class DataList
         }
         processingOffset += size;
         size = 0;
-      }
-      else {
+      } else {
         if (writeOffset == last.data.length) {
           nextOffset.integer = 0;
           processingOffset = 0;
@@ -218,12 +248,11 @@ public class DataList
         }
         break;
       }
-    }
-    while (true);
+    } while (true);
 
     last.writingOffset = writeOffset;
 
-    autoflushExecutor.submit(new Runnable()
+    autoFlushExecutor.submit(new Runnable()
     {
       @Override
       public void run()
@@ -236,9 +265,9 @@ public class DataList
     });
   }
 
-  public void setAutoflushExecutor(final ExecutorService es)
+  public void setAutoFlushExecutor(final ExecutorService es)
   {
-    autoflushExecutor = es;
+    autoFlushExecutor = es;
   }
 
   public void setSecondaryStorage(Storage storage, ExecutorService es)
@@ -250,13 +279,16 @@ public class DataList
   /*
    * Iterator related functions.
    */
-  protected final HashMap<String, DataListIterator> iterators = new HashMap<String, DataListIterator>();
-
-  public DataListIterator getIterator(Block block)
+  protected DataListIterator getIterator(final Block block)
   {
     return new DataListIterator(block);
   }
 
+  private synchronized Block getNextBlock(final Block block)
+  {
+    return block.next;
+  }
+
   public Iterator<SerializedData> newIterator(String identifier, long windowId)
   {
     //logger.debug("request for a new iterator {} and {}", identifier, windowId);
@@ -283,21 +315,17 @@ public class DataList
    */
   public boolean delIterator(Iterator<SerializedData> iterator)
   {
-    boolean released = false;
     if (iterator instanceof DataListIterator) {
       DataListIterator dli = (DataListIterator)iterator;
       for (Entry<String, DataListIterator> e : iterators.entrySet()) {
         if (e.getValue() == dli) {
-          if (dli.da != null) {
-            dli.da.release(false);
-          }
+          dli.close();
           iterators.remove(e.getKey());
-          released = true;
-          break;
+          return true;
         }
       }
     }
-    return released;
+    return false;
   }
 
   public void addDataListener(DataListener dl)
@@ -310,20 +338,17 @@ public class DataList
         HashSet<DataListener> set;
         if (listeners.containsKey(partition)) {
           set = listeners.get(partition);
-        }
-        else {
+        } else {
           set = new HashSet<DataListener>();
           listeners.put(partition, set);
         }
         set.add(dl);
       }
-    }
-    else {
+    } else {
       HashSet<DataListener> set;
       if (listeners.containsKey(DataListener.NULL_PARTITION)) {
         set = listeners.get(DataListener.NULL_PARTITION);
-      }
-      else {
+      } else {
         set = new HashSet<DataListener>();
         listeners.put(DataListener.NULL_PARTITION, set);
       }
@@ -341,8 +366,7 @@ public class DataList
           listeners.get(partition).remove(dl);
         }
       }
-    }
-    else {
+    } else {
       if (listeners.containsKey(DataListener.NULL_PARTITION)) {
         listeners.get(DataListener.NULL_PARTITION).remove(dl);
       }
@@ -351,43 +375,46 @@ public class DataList
     all_listeners.remove(dl);
   }
 
-  public void addBuffer(byte[] array)
+  public boolean suspendRead(final AbstractClient client)
   {
-    last.next = new Block(identifier, array);
-    last.next.starting_window = last.ending_window;
-    last.next.ending_window = last.ending_window;
-    last = last.next;
-
-    //logger.debug("addbuffer last = {}", last);
-    int inmemBlockCount;
+    synchronized (suspendedClients) {
+      return client.suspendReadIfResumed() && suspendedClients.add(client);
+    }
+  }
 
-    inmemBlockCount = 0;
-    for (Block temp = first; temp != null; temp = temp.next) {
-      if (temp.data != null) {
-        inmemBlockCount++;
+  public boolean resumeSuspendedClients(final int numberOfInMemBlockPermits)
+  {
+    boolean resumedSuspendedClients = false;
+    if (numberOfInMemBlockPermits > 0) {
+      synchronized (suspendedClients) {
+        for (AbstractClient client : suspendedClients) {
+          resumedSuspendedClients |= client.resumeReadIfSuspended();
+        }
+        suspendedClients.clear();
       }
     }
+    return resumedSuspendedClients;
+  }
 
-    if (inmemBlockCount >= MAX_COUNT_OF_INMEM_BLOCKS) {
-      //logger.debug("InmemBlockCount before releaes {}", inmemBlockCount);
-      for (Block temp = first; temp != null; temp = temp.next) {
-        boolean found = false;
-        for (DataListIterator iterator : iterators.values()) {
-          if (iterator.da == temp) {
-            found = true;
-            break;
-          }
-        }
+  public boolean isMemoryBlockAvailable()
+  {
+    return numberOfInMemBlockPermits.get() > 0;
+  }
 
-        if (!found && temp.data != null) {
-          temp.release(true);
-          if (--inmemBlockCount < MAX_COUNT_OF_INMEM_BLOCKS) {
-            break;
-          }
-        }
-      }
-      //logger.debug("InmemBlockCount after release {}", inmemBlockCount);
+  public byte[] newBuffer()
+  {
+    return new byte[blockSize];
+  }
+
+  public void addBuffer(byte[] array)
+  {
+    final int numberOfInMemBlockPermits = this.numberOfInMemBlockPermits.decrementAndGet();
+    if (numberOfInMemBlockPermits < 0) {
+      logger.warn("Exceeded allowed memory block allocation by {}", -numberOfInMemBlockPermits);
     }
+    last.next = new Block(identifier, array, last.ending_window, last.ending_window);
+    last.release(false);
+    last = last.next;
   }
 
   public byte[] getBuffer(long windowId)
@@ -461,6 +488,12 @@ public class DataList
     return status;
   }
 
+  @Override
+  public String toString()
+  {
+    return getClass().getName() + '@' + Integer.toHexString(hashCode()) + " {" + identifier + '}';
+  }
+
   /**
    * <p>Block class.</p>
    *
@@ -484,7 +517,7 @@ public class DataList
     /**
      * The starting window which is available in this data array.
      */
-    long starting_window = -1;
+    long starting_window;
     /**
      * the ending window which is available in this data array
      */
@@ -500,7 +533,8 @@ public class DataList
     /**
      * how count of references to this block.
      */
-    int refCount;
+    AtomicInteger refCount;
+    Future future;
 
     public Block(String id, int size)
     {
@@ -509,9 +543,17 @@ public class DataList
 
     public Block(String id, byte[] array)
     {
+      this(id, array, -1, 0);
+    }
+
+    public Block(final String id, final byte[] array, final long starting_window, final long ending_window)
+    {
       identifier = id;
       data = array;
-      refCount = 1;
+      refCount = new AtomicInteger(1);
+      this.starting_window = starting_window;
+      this.ending_window = ending_window;
+      //logger.debug("Allocated new {}", this);
     }
 
     void getNextData(SerializedData current)
@@ -530,27 +572,28 @@ public class DataList
     public long rewind(long windowId)
     {
       long bs = starting_window & 0x7fffffff00000000L;
-      DataListIterator dli = getIterator(this);
-      done:
-      while (dli.hasNext()) {
-        SerializedData sd = dli.next();
-        switch (sd.buffer[sd.dataOffset]) {
-          case MessageType.RESET_WINDOW_VALUE:
-            ResetWindowTuple rwt = (ResetWindowTuple)Tuple.getTuple(sd.buffer, sd.dataOffset, sd.length - sd.dataOffset + sd.offset);
-            bs = (long)rwt.getBaseSeconds() << 32;
-            if (bs > windowId) {
-              writingOffset = sd.offset;
-              break done;
-            }
-            break;
+      try (DataListIterator dli = getIterator(this)) {
+        done:
+        while (dli.hasNext()) {
+          SerializedData sd = dli.next();
+          switch (sd.buffer[sd.dataOffset]) {
+            case MessageType.RESET_WINDOW_VALUE:
+              ResetWindowTuple rwt = (ResetWindowTuple) Tuple.getTuple(sd.buffer, sd.dataOffset, sd.length - sd.dataOffset + sd.offset);
+              bs = (long)rwt.getBaseSeconds() << 32;
+              if (bs > windowId) {
+                writingOffset = sd.offset;
+                break done;
+              }
+              break;
 
-          case MessageType.BEGIN_WINDOW_VALUE:
-            BeginWindowTuple bwt = (BeginWindowTuple)Tuple.getTuple(sd.buffer, sd.dataOffset, sd.length - sd.dataOffset + sd.offset);
-            if ((bs | bwt.getWindowId()) >= windowId) {
-              writingOffset = sd.offset;
-              break done;
-            }
-            break;
+            case MessageType.BEGIN_WINDOW_VALUE:
+              BeginWindowTuple bwt = (BeginWindowTuple) Tuple.getTuple(sd.buffer, sd.dataOffset, sd.length - sd.dataOffset + sd.offset);
+              if ((bs | bwt.getWindowId()) >= windowId) {
+                writingOffset = sd.offset;
+                break done;
+              }
+              break;
+          }
         }
       }
 
@@ -558,16 +601,12 @@ public class DataList
         starting_window = windowId;
         ending_window = windowId;
         //logger.debug("assigned both window id {}", this);
-      }
-      else if (windowId < ending_window) {
+      } else if (windowId < ending_window) {
         ending_window = windowId;
         //logger.debug("assigned end window id {}", this);
       }
 
-      if (uniqueIdentifier != 0) {
-        storage.discard(identifier, uniqueIdentifier);
-        uniqueIdentifier = 0;
-      }
+      discard(false);
 
       return bs;
     }
@@ -580,39 +619,40 @@ public class DataList
       long bs = starting_window & 0xffffffff00000000L;
       SerializedData lastReset = null;
 
-      DataListIterator dli = getIterator(this);
-      done:
-      while (dli.hasNext()) {
-        SerializedData sd = dli.next();
-        switch (sd.buffer[sd.dataOffset]) {
-          case MessageType.RESET_WINDOW_VALUE:
-            ResetWindowTuple rwt = (ResetWindowTuple)Tuple.getTuple(sd.buffer, sd.dataOffset, sd.length - sd.dataOffset + sd.offset);
-            bs = (long)rwt.getBaseSeconds() << 32;
-            lastReset = sd;
-            break;
-
-          case MessageType.BEGIN_WINDOW_VALUE:
-            BeginWindowTuple bwt = (BeginWindowTuple)Tuple.getTuple(sd.buffer, sd.dataOffset, sd.length - sd.dataOffset + sd.offset);
-            if ((bs | bwt.getWindowId()) > longWindowId) {
-              found = true;
-              if (lastReset != null) {
+      try (DataListIterator dli = getIterator(this)) {
+        done:
+        while (dli.hasNext()) {
+          SerializedData sd = dli.next();
+          switch (sd.buffer[sd.dataOffset]) {
+            case MessageType.RESET_WINDOW_VALUE:
+              ResetWindowTuple rwt = (ResetWindowTuple) Tuple.getTuple(sd.buffer, sd.dataOffset, sd.length - sd.dataOffset + sd.offset);
+              bs = (long)rwt.getBaseSeconds() << 32;
+              lastReset = sd;
+              break;
+
+            case MessageType.BEGIN_WINDOW_VALUE:
+              BeginWindowTuple bwt = (BeginWindowTuple) Tuple.getTuple(sd.buffer, sd.dataOffset, sd.length - sd.dataOffset + sd.offset);
+              if ((bs | bwt.getWindowId()) > longWindowId) {
+                found = true;
+                if (lastReset != null) {
                 /*
                  * Restore the last Reset tuple if there was any and adjust the writingOffset to the beginning of the reset tuple.
                  */
-                if (sd.offset >= lastReset.length) {
-                  sd.offset -= lastReset.length;
-                  if (!(sd.buffer == lastReset.buffer && sd.offset == lastReset.offset)) {
-                    System.arraycopy(lastReset.buffer, lastReset.offset, sd.buffer, sd.offset, lastReset.length);
+                  if (sd.offset >= lastReset.length) {
+                    sd.offset -= lastReset.length;
+                    if (!(sd.buffer == lastReset.buffer && sd.offset == lastReset.offset)) {
+                      System.arraycopy(lastReset.buffer, lastReset.offset, sd.buffer, sd.offset, lastReset.length);
+                    }
                   }
+
+                  this.starting_window = bs | bwt.getWindowId();
+                  this.readingOffset = sd.offset;
+                  //logger.debug("assigned starting window id {}", this);
                 }
 
-                this.starting_window = bs | bwt.getWindowId();
-                this.readingOffset = sd.offset;
-                //logger.debug("assigned starting window id {}", this);
+                break done;
               }
-
-              break done;
-            }
+          }
         }
       }
 
@@ -654,46 +694,44 @@ public class DataList
           logger.warn("Unhandled condition while purging the data purge to offset {}", sd.offset);
         }
 
-        if (uniqueIdentifier != 0) {
-          storage.discard(identifier, uniqueIdentifier);
-          uniqueIdentifier = 0;
-        }
+        discard(false);
       }
     }
 
-    private Runnable getRetriever(final int uniqueIdentifier, final Storage storage)
+    private Runnable getRetriever()
     {
       return new Runnable()
       {
         @Override
         public void run()
         {
-          byte[] lData = storage.retrieve(identifier, uniqueIdentifier);
+          byte[] data = storage.retrieve(identifier, uniqueIdentifier);
           synchronized (Block.this) {
-            data = lData;
+            Block.this.data = data;
             readingOffset = 0;
             writingOffset = data.length;
-            if (refCount > 1) {
+            if (refCount.get() > 1) {
               Block.this.notifyAll();
             }
+            int numberOfInMemBlockPermits = DataList.this.numberOfInMemBlockPermits.decrementAndGet();
+            if (numberOfInMemBlockPermits < 0) {
+              logger.warn("Exceeded allowed memory block allocation by {}", -numberOfInMemBlockPermits);
+            }
           }
         }
-
       };
     }
 
-    synchronized void acquire(boolean wait)
+    protected void acquire(boolean wait)
     {
-      if (refCount++ == 0 && uniqueIdentifier > 0 && storage != null) {
-        assert (data == null);
+      if (refCount.getAndIncrement() == 0 && storage != null && data == null) {
+        final Runnable retriever = getRetriever();
         if (wait) {
-          getRetriever(uniqueIdentifier, storage).run();
+          retriever.run();
+        } else {
+          future = storageExecutor.submit(retriever);
         }
-        else {
-          storageExecutor.submit(getRetriever(uniqueIdentifier, storage));
-        }
-      }
-      else if (wait && data == null) {
+      } else if (wait && data == null) {
         try {
           wait();
         }
@@ -710,35 +748,70 @@ public class DataList
         @Override
         public void run()
         {
-          int i = storage.store(identifier, data, readingOffset, writingOffset);
-          if (i == 0) {
+          if (uniqueIdentifier == 0) {
+            uniqueIdentifier = storage.store(identifier, data, readingOffset, writingOffset);
+          }
+          if (uniqueIdentifier == 0) {
             logger.warn("Storage returned unexpectedly, please check the status of the spool directory!");
           }
           else {
+            //logger.debug("Spooled {} to disk", Block.this);
             synchronized (Block.this) {
-              Block.this.uniqueIdentifier = i;
-              if (refCount == 0) {
+              if (refCount.get() == 0) {
                 Block.this.data = null;
               }
             }
+            int numberOfInMemBlockPermits = DataList.this.numberOfInMemBlockPermits.incrementAndGet();
+            assert numberOfInMemBlockPermits < MAX_COUNT_OF_INMEM_BLOCKS : "Number of in memory block permits " + numberOfInMemBlockPermits + " exceeded configured maximum " + MAX_COUNT_OF_INMEM_BLOCKS + '.';
+            resumeSuspendedClients(numberOfInMemBlockPermits);
           }
         }
+      };
+    }
+
+    protected void release(boolean wait)
+    {
+      final int refCount = this.refCount.decrementAndGet();
+      if (refCount == 0 && storage != null) {
+        assert (next != null);
+        final Runnable storer = getStorer(data, readingOffset, writingOffset, storage);
+        if (wait && numberOfInMemBlockPermits.get() == 0) {
+          storer.run();
+        } else if (numberOfInMemBlockPermits.get() < MAX_COUNT_OF_INMEM_BLOCKS/2) {
+          future = storageExecutor.submit(storer);
+        }
+      } else {
+        logger.debug("Holding {} in memory due to {} references.", this, refCount);
+      }
+    }
 
+    private Runnable getDiscarder()
+    {
+      return new Runnable()
+      {
+        @Override
+        public void run()
+        {
+          if (uniqueIdentifier > 0) {
+            logger.debug("Discarding {}", Block.this);
+            storage.discard(identifier, uniqueIdentifier);
+            uniqueIdentifier = 0;
+          }
+        }
       };
     }
 
-    synchronized void release(boolean wait)
+    protected void discard(final boolean wait)
     {
-      if (--refCount == 0 && storage != null) {
-        if (uniqueIdentifier != 0) {
-          data = null;
-          return;
+      if (storage != null) {
+        if (future != null) {
+          future.cancel(false);
         }
+        final Runnable discarder = getDiscarder();
         if (wait) {
-          getStorer(data, readingOffset, writingOffset, storage).run();
-        }
-        else {
-          storageExecutor.submit(getStorer(data, readingOffset, writingOffset, storage));
+          discarder.run();
+        } else {
+          future = storageExecutor.submit(discarder);
         }
       }
     }
@@ -746,10 +819,10 @@ public class DataList
     @Override
     public String toString()
     {
-      return "Block{" + "identifier=" + identifier + ", data=" + (data == null ? "null" : data.length)
+      return getClass().getName() + '@' + Integer.toHexString(hashCode()) + "{identifier=" + identifier + ", data=" + (data == null ? "null" : data.length)
              + ", readingOffset=" + readingOffset + ", writingOffset=" + writingOffset
              + ", starting_window=" + Codec.getStringWindowId(starting_window) + ", ending_window=" + Codec.getStringWindowId(ending_window)
-             + ", uniqueIdentifier=" + uniqueIdentifier + ", next=" + (next == null ? "null" : next.identifier)
+             + ", refCount=" + refCount.get() + ", uniqueIdentifier=" + uniqueIdentifier + ", next=" + (next == null ? "null" : next.identifier)
              + '}';
     }
 
@@ -760,7 +833,7 @@ public class DataList
    *
    * @since 0.3.2
    */
-  public class DataListIterator implements Iterator<SerializedData>
+  public class DataListIterator implements Iterator<SerializedData>, AutoCloseable
   {
     Block da;
     SerializedData current;
@@ -792,12 +865,28 @@ public class DataList
       return readOffset;
     }
 
+    protected boolean switchToNextBlock()
+    {
+      Block next = getNextBlock(da);
+      if (next == null) {
+        return false;
+      }
+      //logger.debug("{}: switching to the next block {}->{}", this, da, da.next);
+      next.acquire(true);
+      da.release(false);
+      da = next;
+      size = 0;
+      buffer = da.data;
+      readOffset = da.readingOffset;
+      return true;
+    }
+
     /**
      *
      * @return boolean
      */
     @Override
-    public synchronized boolean hasNext()
+    public boolean hasNext()
     {
       while (size == 0) {
         size = VarInt.read(buffer, readOffset, da.writingOffset, nextOffset);
@@ -810,53 +899,26 @@ public class DataList
           case -2:
           case -1:
           case 0:
-            if (da.writingOffset == buffer.length) {
-              if (da.next == null) {
-                return false;
-              }
-
-              da.release(false);
-              da.next.acquire(true);
-              da = da.next;
-              size = 0;
-              buffer = da.data;
-              readOffset = da.readingOffset;
-            }
-            else {
-              return false;
+            if (da.writingOffset == buffer.length && switchToNextBlock()) {
+              continue;
             }
+            return false;
         }
       }
 
-      while (true) {
-        if (nextOffset.integer + size <= da.writingOffset) {
-          current = new SerializedData(buffer, readOffset, size + nextOffset.integer - readOffset);
-          current.dataOffset = nextOffset.integer;
-          //if (buffer[current.dataOffset] == MessageType.BEGIN_WINDOW_VALUE || buffer[current.dataOffset] == MessageType.END_WINDOW_VALUE) {
-          //  Tuple t = Tuple.getTuple(current.buffer, current.dataOffset, current.length - current.dataOffset + current.offset);
-          //  logger.debug("next t = {}", t);
-          //}
-          return true;
-        }
-        else {
-          if (da.writingOffset == buffer.length) {
-            if (da.next == null) {
-              return false;
-            }
-            else {
-              da.release(false);
-              da.next.acquire(true);
-              da = da.next;
-              size = 0;
-              readOffset = nextOffset.integer = da.readingOffset;
-              buffer = da.data;
-            }
-          }
-          else {
-            return false;
-          }
-        }
+      if (nextOffset.integer + size <= da.writingOffset) {
+        current = new SerializedData(buffer, readOffset, size + nextOffset.integer - readOffset);
+        current.dataOffset = nextOffset.integer;
+        //if (buffer[current.dataOffset] == MessageType.BEGIN_WINDOW_VALUE || buffer[current.dataOffset] == MessageType.END_WINDOW_VALUE) {
+        //  Tuple t = Tuple.getTuple(current.buffer, current.dataOffset, current.length - current.dataOffset + current.offset);
+        //  logger.debug("next t = {}", t);
+        //}
+        return true;
+      } else if (da.writingOffset == buffer.length && switchToNextBlock()) {
+        nextOffset.integer = da.readingOffset;
+        return hasNext();
       }
+      return false;
     }
 
     /**
@@ -882,6 +944,16 @@ public class DataList
       current.buffer[current.dataOffset] = MessageType.NO_MESSAGE_VALUE;
     }
 
+    @Override
+    public void close()
+    {
+      if (da != null) {
+        da.release(false);
+        da = null;
+        buffer = null;
+      }
+    }
+
     void rewind(int processingOffset)
     {
       readOffset = processingOffset;
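
The hunks above replace unconditional spooling with a counter of in-memory block permits: allocating a block takes a permit, releasing a block spools it to disk only when permits run low, and the background storer returns the permit once the block is on disk. For illustration only, that pattern can be sketched as below; the class and method names are invented for the example and are not the Apex buffer server API.

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.atomic.AtomicInteger;

    // Minimal sketch of the permit-gated spooling idea; not the Apex implementation.
    public class BlockPermitSketch
    {
      private static final int MAX_BLOCKS = 8; // assumed limit on in-memory blocks
      private final AtomicInteger permits = new AtomicInteger(MAX_BLOCKS);
      private final ExecutorService storageExecutor = Executors.newSingleThreadExecutor();

      // A new in-memory block takes a permit; going negative means the limit was exceeded.
      void onBlockAllocated()
      {
        int remaining = permits.decrementAndGet();
        if (remaining < 0) {
          System.err.println("Exceeded allowed memory block allocation by " + (-remaining));
        }
      }

      // When the last reader releases a block, decide whether and how to spool it.
      void onBlockReleased(boolean canBlock, Runnable spoolToDisk)
      {
        if (canBlock && permits.get() == 0) {
          spoolToDisk.run();                     // memory exhausted: spool on the caller's thread
        } else if (permits.get() < MAX_BLOCKS / 2) {
          storageExecutor.submit(spoolToDisk);   // running low: spool in the background
        }                                        // otherwise keep the block in memory
      }

      // The background storer returns the permit once the block has been written out.
      void onBlockSpooled()
      {
        permits.incrementAndGet();               // suspended clients may now resume
      }
    }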

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/1b8aecf3/bufferserver/src/main/java/com/datatorrent/bufferserver/internal/FastDataList.java
----------------------------------------------------------------------
diff --git a/bufferserver/src/main/java/com/datatorrent/bufferserver/internal/FastDataList.java b/bufferserver/src/main/java/com/datatorrent/bufferserver/internal/FastDataList.java
index d260b37..fe0d9f4 100644
--- a/bufferserver/src/main/java/com/datatorrent/bufferserver/internal/FastDataList.java
+++ b/bufferserver/src/main/java/com/datatorrent/bufferserver/internal/FastDataList.java
@@ -39,12 +39,6 @@ public class FastDataList extends DataList
     super(identifier, blocksize, numberOfCacheBlocks);
   }
 
-  public FastDataList(String identifier, int blocksize, int numberOfCacheBlocks, int refCount)
-  {
-    super(identifier, blocksize, numberOfCacheBlocks, refCount);
-  }
-
-
   long item;
 
   @Override
@@ -102,7 +96,7 @@ public class FastDataList extends DataList
 
     last.writingOffset = writeOffset;
 
-    autoflushExecutor.submit(new Runnable()
+    autoFlushExecutor.submit(new Runnable()
     {
       @Override
       public void run()
@@ -116,7 +110,7 @@ public class FastDataList extends DataList
   }
 
   @Override
-  public FastDataListIterator getIterator(Block block)
+  protected FastDataListIterator getIterator(Block block)
   {
     return new FastDataListIterator(block);
   }
@@ -188,59 +182,37 @@ public class FastDataList extends DataList
     }
 
     @Override
-    public synchronized boolean hasNext()
+    public boolean hasNext()
     {
       while (size == 0) {
         if (da.writingOffset - readOffset >= 2) {
           size = buffer[readOffset];
           size |= (buffer[readOffset + 1] << 8);
-        }
-        else {
-          if (da.writingOffset == buffer.length) {
-            if (da.next == null) {
-              return false;
-            }
-
-            da.release(false);
-            da.next.acquire(true);
-            da = da.next;
-            size = 0;
-            buffer = da.data;
-            readOffset = da.readingOffset;
-          }
-          else {
+        } else {
+          if (da.writingOffset == buffer.length && switchToNextBlock()) {
+            continue;
+          } else {
             return false;
           }
         }
       }
 
-      while (true) {
-        if (readOffset + size + 2 <= da.writingOffset) {
-          current = new SerializedData(buffer, readOffset, size + 2);
-          current.dataOffset = readOffset + 2;
-          return true;
-        }
-        else {
-          if (da.writingOffset == buffer.length) {
-            if (da.next == null) {
-              return false;
-            }
-            else {
-              da.release(false);
-              da.next.acquire(true);
-              da = da.next;
-              size = 0;
-              readOffset = nextOffset.integer = da.readingOffset;
-              buffer = da.data;
-            }
-          }
-          else {
+      if (readOffset + size + 2 <= da.writingOffset) {
+        current = new SerializedData(buffer, readOffset, size + 2);
+        current.dataOffset = readOffset + 2;
+        return true;
+      } else {
+        if (da.writingOffset == buffer.length) {
+          if (!switchToNextBlock()) {
             return false;
           }
+          nextOffset.integer = da.readingOffset;
+          return hasNext();
+        } else {
+          return false;
         }
       }
     }
-
   }
 
   private static final Logger logger = LoggerFactory.getLogger(FastDataList.class);
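
Both iterators now share the block-switching step introduced above (acquire the next block before releasing the current one, then reset the read position), and DataListIterator additionally releases its block on close(). A toy illustration of that reference-counting walk, using invented types rather than the buffer server classes:

    import java.util.Iterator;
    import java.util.NoSuchElementException;

    // Illustrative only: Block is a stand-in type, not the buffer server's Block class.
    class BlockChainIterator implements Iterator<Byte>, AutoCloseable
    {
      static final class Block
      {
        byte[] data = new byte[0];
        Block next;

        void acquire() { /* pin the block in memory, reloading it from disk if spooled */ }

        void release() { /* unpin the block; it may now be spooled or discarded */ }
      }

      private Block current;
      private int readOffset;

      BlockChainIterator(Block first)
      {
        current = first;
        current.acquire();
      }

      private boolean switchToNextBlock()
      {
        if (current.next == null) {
          return false;
        }
        Block next = current.next;
        next.acquire();        // acquire first so the chain is never left unpinned
        current.release();
        current = next;
        readOffset = 0;
        return true;
      }

      @Override
      public boolean hasNext()
      {
        while (readOffset >= current.data.length) {
          if (!switchToNextBlock()) {
            return false;
          }
        }
        return true;
      }

      @Override
      public Byte next()
      {
        if (!hasNext()) {
          throw new NoSuchElementException();
        }
        return current.data[readOffset++];
      }

      @Override
      public void remove()
      {
        throw new UnsupportedOperationException();
      }

      @Override
      public void close()
      {
        if (current != null) {
          current.release();   // like DataListIterator.close(): drop the last reference
          current = null;
        }
      }
    }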

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/1b8aecf3/bufferserver/src/main/java/com/datatorrent/bufferserver/server/Server.java
----------------------------------------------------------------------
diff --git a/bufferserver/src/main/java/com/datatorrent/bufferserver/server/Server.java b/bufferserver/src/main/java/com/datatorrent/bufferserver/server/Server.java
index 7fb4823..33a2442 100644
--- a/bufferserver/src/main/java/com/datatorrent/bufferserver/server/Server.java
+++ b/bufferserver/src/main/java/com/datatorrent/bufferserver/server/Server.java
@@ -97,16 +97,16 @@ public class Server implements ServerListener
   @Override
   public void unregistered(SelectionKey key)
   {
-    serverHelperExecutor.shutdown();
-    storageHelperExecutor.shutdown();
-    try {
-      serverHelperExecutor.awaitTermination(5000, TimeUnit.MILLISECONDS);
-    }
-    catch (InterruptedException ex) {
-      logger.debug("Executor Termination", ex);
-    }
-    logger.info("Server stopped listening at {}", address);
-  }
+        serverHelperExecutor.shutdown();
+        storageHelperExecutor.shutdown();
+        try {
+          serverHelperExecutor.awaitTermination(5000, TimeUnit.MILLISECONDS);
+        }
+        catch (InterruptedException ex) {
+          logger.debug("Executor Termination", ex);
+        }
+        logger.info("Server stopped listening at {}", address);
+      }
 
   public synchronized InetSocketAddress run(EventLoop eventloop)
   {
@@ -262,7 +262,7 @@ public class Server implements ServerListener
         //logger.debug("old list = {}", dl);
       }
       else {
-        dl = Tuple.FAST_VERSION.equals(request.getVersion()) ? new FastDataList(upstream_identifier, blockSize, numberOfCacheBlocks, 0) : new DataList(upstream_identifier, blockSize, numberOfCacheBlocks, 0);
+        dl = Tuple.FAST_VERSION.equals(request.getVersion()) ? new FastDataList(upstream_identifier, blockSize, numberOfCacheBlocks) : new DataList(upstream_identifier, blockSize, numberOfCacheBlocks);
         publisherBuffers.put(upstream_identifier, dl);
         //logger.debug("new list = {}", dl);
       }
@@ -401,7 +401,7 @@ public class Server implements ServerListener
           PublishRequestTuple publisherRequest = (PublishRequestTuple)request;
 
           DataList dl = handlePublisherRequest(publisherRequest, this);
-          dl.setAutoflushExecutor(serverHelperExecutor);
+          dl.setAutoFlushExecutor(serverHelperExecutor);
 
           Publisher publisher;
           if (publisherRequest.getVersion().equals(Tuple.FAST_VERSION)) {
@@ -616,6 +616,32 @@ public class Server implements ServerListener
       dirty = true;
     }
 
+    /**
+     * Schedules a task to conditionally resume I/O channel read operations. No-op if {@linkplain java.nio.channels.SelectionKey#OP_READ OP_READ}
+     * is already set in the key {@linkplain java.nio.channels.SelectionKey#interestOps() interestOps}. Otherwise, calls {@linkplain #read(int) read(0)}
+     * to process data left in the Publisher read buffer and registers {@linkplain java.nio.channels.SelectionKey#OP_READ OP_READ} in the key
+     * {@linkplain java.nio.channels.SelectionKey#interestOps() interestOps}.
+     * @return true
+     */
+    @Override
+    public boolean resumeReadIfSuspended()
+    {
+      eventloop.submit(new Runnable()
+      {
+        @Override
+        public void run()
+        {
+          final int interestOps = key.interestOps();
+          if ((interestOps & SelectionKey.OP_READ) == 0) {
+            logger.debug("Resuming read on key {} with attachment {}", key, key.attachment());
+            read(0);
+            key.interestOps(interestOps | SelectionKey.OP_READ);
+          }
+        }
+      });
+      return true;
+    }
+
     @Override
     public void read(int len)
     {
@@ -634,7 +660,9 @@ public class Server implements ServerListener
                    * so we allocate a new byteBuffer and copy over the partially written data to the
                    * new byteBuffer and start as if we always had full room but not enough data.
                    */
-                  switchToNewBuffer(buffer, readOffset);
+                  if (!switchToNewBufferOrSuspendRead(buffer, readOffset)) {
+                    return;
+                  }
                 }
               }
               else if (dirty) {
@@ -660,10 +688,13 @@ public class Server implements ServerListener
             /*
              * hit wall while writing serialized data, so have to allocate a new byteBuffer.
              */
-            switchToNewBuffer(buffer, readOffset - VarInt.getSize(size));
+            if (!switchToNewBufferOrSuspendRead(buffer, readOffset - VarInt.getSize(size))) {
+              readOffset -= VarInt.getSize(size);
+              size = 0;
+              return;
+            }
             size = 0;
-          }
-          else if (dirty) {
+          } else if (dirty) {
             dirty = false;
             datalist.flush(writeOffset);
           }
@@ -673,21 +704,33 @@ public class Server implements ServerListener
       while (true);
     }
 
-    public void switchToNewBuffer(byte[] array, int offset)
+    private boolean switchToNewBufferOrSuspendRead(final byte[] array, final int offset)
     {
-      byte[] newBuffer = new byte[datalist.getBlockSize()];
-      byteBuffer = ByteBuffer.wrap(newBuffer);
-      if (array == null || array.length - offset == 0) {
-        writeOffset = 0;
+      if (switchToNewBuffer(array, offset)) {
+        return true;
       }
-      else {
-        writeOffset = array.length - offset;
-        System.arraycopy(buffer, offset, newBuffer, 0, writeOffset);
-        byteBuffer.position(writeOffset);
+      datalist.suspendRead(this);
+      return false;
+    }
+
+    private boolean switchToNewBuffer(final byte[] array, final int offset)
+    {
+      if (datalist.isMemoryBlockAvailable()) {
+        final byte[] newBuffer = datalist.newBuffer();
+        byteBuffer = ByteBuffer.wrap(newBuffer);
+        if (array == null || array.length - offset == 0) {
+          writeOffset = 0;
+        } else {
+          writeOffset = array.length - offset;
+          System.arraycopy(buffer, offset, newBuffer, 0, writeOffset);
+          byteBuffer.position(writeOffset);
+        }
+        buffer = newBuffer;
+        readOffset = 0;
+        datalist.addBuffer(buffer);
+        return true;
       }
-      buffer = newBuffer;
-      readOffset = 0;
-      datalist.addBuffer(buffer);
+      return false;
     }
 
     @Override
@@ -714,7 +757,7 @@ public class Server implements ServerListener
     @Override
     public String toString()
     {
-      return "Server.Publisher{" + "datalist=" + datalist + '}';
+      return getClass().getName() + '@' + Integer.toHexString(hashCode()) + " {datalist=" + datalist + '}';
     }
 
     private volatile boolean torndown;
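
The Publisher changes above add backpressure to the read path: when no memory block is available, the read loop returns without a new buffer and the DataList suspends the client, and resumeReadIfSuspended() later re-enables OP_READ on the event-loop thread after draining what is already buffered. A rough sketch of that NIO pattern follows; the eventLoop executor and the drainPendingBytes() hook are assumptions made for the example, not the Apex API.

    import java.nio.channels.SelectionKey;
    import java.util.concurrent.Executor;

    // Sketch of pausing and resuming channel reads by toggling OP_READ interest.
    class ReadBackpressure
    {
      private final SelectionKey key;
      private final Executor eventLoop;   // assumed to run on the thread that owns the selector

      ReadBackpressure(SelectionKey key, Executor eventLoop)
      {
        this.key = key;
        this.eventLoop = eventLoop;
      }

      // Stop the selector from firing further read events for this channel.
      void suspendRead()
      {
        key.interestOps(key.interestOps() & ~SelectionKey.OP_READ);
      }

      // Re-enable reads once memory is available again; no-op if OP_READ is already set.
      void resumeReadIfSuspended(final Runnable drainPendingBytes)
      {
        eventLoop.execute(new Runnable()
        {
          @Override
          public void run()
          {
            int interestOps = key.interestOps();
            if ((interestOps & SelectionKey.OP_READ) == 0) {
              drainPendingBytes.run();                       // process what is already buffered
              key.interestOps(interestOps | SelectionKey.OP_READ);
            }
          }
        });
      }
    }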



[21/50] [abbrv] incubator-apex-core git commit: Fixed Changelog Format

Posted by vr...@apache.org.
Fixed Changelog Format

Conflicts:
	CHANGELOG.md


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/9aa8514a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/9aa8514a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/9aa8514a

Branch: refs/heads/feature-module
Commit: 9aa8514afd0c97f3561c25843da4ecdcdd5eeb80
Parents: f296129
Author: DataTorrent CI <je...@datatorrent.com>
Authored: Thu Sep 10 18:05:50 2015 +0530
Committer: Aniruddha Thombre <an...@aniruddhas.com>
Committed: Mon Sep 14 18:49:01 2015 +0530

----------------------------------------------------------------------
 CHANGELOG.md | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/9aa8514a/CHANGELOG.md
----------------------------------------------------------------------
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 141ff79..af1ca53 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,22 @@
 Apex Changelog
 ========================================================================================================================
 
+
+Version 3.1.0
+------------------------------------------------------------------------------------------------------------------------
+
+### Improvement
+* [APEX-12] - Fix Base Operator To Not Show Name Property In App Builder
+
+### Bug
+* [APEX-35] - Test exceptions due to missing directory in saveMetaInfo
+* [APEX-36] - FSStorageAgent to account for HDFS lease when writing checkpoint files
+* [APEX-37] - Container and operator json line file in StreamingContainerManager should not be appended from previous app attempt 
+* [APEX-43] - SchemaSupport: TUPLE_CLASS attribute should use Class2String StringCodec
+* [APEX-56] - Controlled plan modification on operator shutdown 
+
+
+
 Version 3.0.0
 ------------------------------------------------------------------------------------------------------------------------
 


[37/50] [abbrv] incubator-apex-core git commit: APEX-28 #resolve

Posted by vr...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/977093e1/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanTest.java b/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanTest.java
deleted file mode 100644
index 78173d8..0000000
--- a/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanTest.java
+++ /dev/null
@@ -1,990 +0,0 @@
-/**
- * Copyright (C) 2015 DataTorrent, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *         http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datatorrent.stram.plan;
-
-import com.datatorrent.common.util.BaseOperator;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.Serializable;
-
-import java.util.*;
-
-import javax.validation.*;
-import javax.validation.constraints.AssertTrue;
-import javax.validation.constraints.Min;
-import javax.validation.constraints.NotNull;
-import javax.validation.constraints.Pattern;
-
-import com.esotericsoftware.kryo.DefaultSerializer;
-import com.esotericsoftware.kryo.serializers.JavaSerializer;
-import com.google.common.collect.Maps;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import static org.junit.Assert.*;
-
-import com.datatorrent.common.partitioner.StatelessPartitioner;
-import com.datatorrent.api.*;
-import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.Context.PortContext;
-import com.datatorrent.api.DAG.Locality;
-import com.datatorrent.api.annotation.InputPortFieldAnnotation;
-import com.datatorrent.api.annotation.OperatorAnnotation;
-import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
-import com.datatorrent.netlet.util.Slice;
-import com.datatorrent.stram.engine.GenericTestOperator;
-import com.datatorrent.stram.engine.TestGeneratorInputOperator;
-import com.datatorrent.stram.engine.TestNonOptionalOutportInputOperator;
-import com.datatorrent.stram.engine.TestOutputOperator;
-import com.datatorrent.stram.plan.logical.LogicalPlan;
-import com.datatorrent.stram.plan.logical.LogicalPlan;
-import com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta;
-import com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta;
-import com.datatorrent.stram.support.StramTestSupport.MemoryStorageAgent;
-import com.datatorrent.stram.support.StramTestSupport.RegexMatcher;
-
-public class LogicalPlanTest {
-
-  @Test
-  public void testCycleDetection() {
-     LogicalPlan dag = new LogicalPlan();
-
-     //NodeConf operator1 = b.getOrAddNode("operator1");
-     GenericTestOperator operator2 = dag.addOperator("operator2", GenericTestOperator.class);
-     GenericTestOperator operator3 = dag.addOperator("operator3", GenericTestOperator.class);
-     GenericTestOperator operator4 = dag.addOperator("operator4", GenericTestOperator.class);
-     //NodeConf operator5 = b.getOrAddNode("operator5");
-     //NodeConf operator6 = b.getOrAddNode("operator6");
-     GenericTestOperator operator7 = dag.addOperator("operator7", GenericTestOperator.class);
-
-     // strongly connect n2-n3-n4-n2
-     dag.addStream("n2n3", operator2.outport1, operator3.inport1);
-
-     dag.addStream("n3n4", operator3.outport1, operator4.inport1);
-
-     dag.addStream("n4n2", operator4.outport1, operator2.inport1);
-
-     // self referencing operator cycle
-     StreamMeta n7n7 = dag.addStream("n7n7", operator7.outport1, operator7.inport1);
-     try {
-       n7n7.addSink(operator7.inport1);
-       fail("cannot add to stream again");
-     } catch (Exception e) {
-       // expected, stream can have single input/output only
-     }
-
-     List<List<String>> cycles = new ArrayList<List<String>>();
-     dag.findStronglyConnected(dag.getMeta(operator7), cycles);
-     assertEquals("operator self reference", 1, cycles.size());
-     assertEquals("operator self reference", 1, cycles.get(0).size());
-     assertEquals("operator self reference", dag.getMeta(operator7).getName(), cycles.get(0).get(0));
-
-     // 3 operator cycle
-     cycles.clear();
-     dag.findStronglyConnected(dag.getMeta(operator4), cycles);
-     assertEquals("3 operator cycle", 1, cycles.size());
-     assertEquals("3 operator cycle", 3, cycles.get(0).size());
-     assertTrue("operator2", cycles.get(0).contains(dag.getMeta(operator2).getName()));
-     assertTrue("operator3", cycles.get(0).contains(dag.getMeta(operator3).getName()));
-     assertTrue("operator4", cycles.get(0).contains(dag.getMeta(operator4).getName()));
-
-     try {
-       dag.validate();
-       fail("validation should fail");
-     } catch (ValidationException e) {
-       // expected
-     }
-
-  }
-
-  public static class ValidationOperator extends BaseOperator {
-    public final transient DefaultOutputPort<Object> goodOutputPort = new DefaultOutputPort<Object>();
-
-    public final transient DefaultOutputPort<Object> badOutputPort = new DefaultOutputPort<Object>();
-  }
-
-  public static class CounterOperator extends BaseOperator {
-    final public transient InputPort<Object> countInputPort = new DefaultInputPort<Object>() {
-      @Override
-      final public void process(Object payload) {
-      }
-    };
-  }
-
-  @Test
-  public void testLogicalPlanSerialization() throws Exception {
-
-    LogicalPlan dag = new LogicalPlan();
-    dag.setAttribute(OperatorContext.STORAGE_AGENT, new MemoryStorageAgent());
-
-    ValidationOperator validationNode = dag.addOperator("validationNode", ValidationOperator.class);
-    CounterOperator countGoodNode = dag.addOperator("countGoodNode", CounterOperator.class);
-    CounterOperator countBadNode = dag.addOperator("countBadNode", CounterOperator.class);
-    //ConsoleOutputOperator echoBadNode = dag.addOperator("echoBadNode", ConsoleOutputOperator.class);
-
-    // good tuples to counter operator
-    dag.addStream("goodTuplesStream", validationNode.goodOutputPort, countGoodNode.countInputPort);
-
-    // bad tuples to separate stream and echo operator
-    // (stream with 2 outputs)
-    dag.addStream("badTuplesStream", validationNode.badOutputPort, countBadNode.countInputPort);
-
-    Assert.assertEquals("number root operators", 1, dag.getRootOperators().size());
-    Assert.assertEquals("root operator id", "validationNode", dag.getRootOperators().get(0).getName());
-
-    dag.getContextAttributes(countGoodNode).put(OperatorContext.SPIN_MILLIS, 10);
-
-    ByteArrayOutputStream bos = new ByteArrayOutputStream();
-    LogicalPlan.write(dag, bos);
-
-    // System.out.println("serialized size: " + bos.toByteArray().length);
-
-    ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
-    LogicalPlan dagClone = LogicalPlan.read(bis);
-    Assert.assertNotNull(dagClone);
-    Assert.assertEquals("number operators in clone", dag.getAllOperators().size(), dagClone.getAllOperators().size());
-    Assert.assertEquals("number root operators in clone", 1, dagClone.getRootOperators().size());
-    Assert.assertTrue("root operator in operators", dagClone.getAllOperators().contains(dagClone.getRootOperators().get(0)));
-
-
-    Operator countGoodNodeClone = dagClone.getOperatorMeta("countGoodNode").getOperator();
-    Assert.assertEquals("", new Integer(10), dagClone.getContextAttributes(countGoodNodeClone).get(OperatorContext.SPIN_MILLIS));
-
-  }
-
-  @Test
-  public void testDeleteOperator()
-  {
-    LogicalPlan dag = new LogicalPlan();
-    TestGeneratorInputOperator input = dag.addOperator("input1", TestGeneratorInputOperator.class);
-    GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
-    GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
-    dag.addStream("s0", input.outport, o1.inport1);
-    StreamMeta s1 = dag.addStream("s1", o1.outport1, o2.inport1);
-    dag.validate();
-    Assert.assertEquals("", 3, dag.getAllOperators().size());
-
-    dag.removeOperator(o2);
-    s1.remove();
-    dag.validate();
-    Assert.assertEquals("", 2, dag.getAllOperators().size());
-  }
-
-  public static class ValidationTestOperator extends BaseOperator implements InputOperator {
-    @NotNull
-    @Pattern(regexp=".*malhar.*", message="Value has to contain 'malhar'!")
-    private String stringField1;
-
-    @Min(2)
-    private int intField1;
-
-    @AssertTrue(message="stringField1 should end with intField1")
-    private boolean isValidConfiguration() {
-      return stringField1.endsWith(String.valueOf(intField1));
-    }
-
-    private String getterProperty2 = "";
-
-    @NotNull
-    public String getProperty2() {
-      return getterProperty2;
-    }
-
-    public void setProperty2(String s) {
-      // annotations need to be on the getter
-      getterProperty2 = s;
-    }
-
-    private String[] stringArrayField;
-
-    public String[] getStringArrayField() {
-      return stringArrayField;
-    }
-
-    public void setStringArrayField(String[] stringArrayField) {
-      this.stringArrayField = stringArrayField;
-    }
-
-    public class Nested {
-      @NotNull
-      private String property = "";
-
-      public String getProperty() {
-        return property;
-      }
-
-      public void setProperty(String property) {
-        this.property = property;
-      }
-
-    }
-
-    @Valid
-    private final Nested nestedBean = new Nested();
-
-    private String stringProperty2;
-
-    public String getStringProperty2() {
-      return stringProperty2;
-    }
-
-    public void setStringProperty2(String stringProperty2) {
-      this.stringProperty2 = stringProperty2;
-    }
-
-    private Map<String, String> mapProperty = Maps.newHashMap();
-
-    public Map<String, String> getMapProperty()
-    {
-      return mapProperty;
-    }
-
-    public void setMapProperty(Map<String, String> mapProperty)
-    {
-      this.mapProperty = mapProperty;
-    }
-
-    @Override
-    public void emitTuples() {
-      // Emit no tuples
-
-    }
-
-  }
-
-  @Test
-  public void testOperatorValidation() {
-
-    ValidationTestOperator bean = new ValidationTestOperator();
-    bean.stringField1 = "malhar1";
-    bean.intField1 = 1;
-
-    // ensure validation standalone produces expected results
-    ValidatorFactory factory =
-        Validation.buildDefaultValidatorFactory();
-    Validator validator = factory.getValidator();
-    Set<ConstraintViolation<ValidationTestOperator>> constraintViolations =
-             validator.validate(bean);
-    //for (ConstraintViolation<ValidationTestOperator> cv : constraintViolations) {
-    //  System.out.println("validation error: " + cv);
-    //}
-    Assert.assertEquals("" + constraintViolations,1, constraintViolations.size());
-    ConstraintViolation<ValidationTestOperator> cv = constraintViolations.iterator().next();
-    Assert.assertEquals("", bean.intField1, cv.getInvalidValue());
-    Assert.assertEquals("", "intField1", cv.getPropertyPath().toString());
-
-    // ensure DAG validation produces matching results
-    LogicalPlan dag = new LogicalPlan();
-    bean = dag.addOperator("testOperator", bean);
-
-    try {
-      dag.validate();
-      Assert.fail("should throw ConstraintViolationException");
-    } catch (ConstraintViolationException e) {
-      Assert.assertEquals("violation details", constraintViolations, e.getConstraintViolations());
-      String expRegex = ".*ValidationTestOperator\\{name=null}, propertyPath='intField1', message='must be greater than or equal to 2',.*value=1}]";
-      Assert.assertThat("exception message", e.getMessage(), RegexMatcher.matches(expRegex));
-    }
-
-    try {
-      bean.intField1 = 3;
-      dag.validate();
-      Assert.fail("should throw ConstraintViolationException");
-    } catch (ConstraintViolationException e) {
-      ConstraintViolation<?> cv2 = e.getConstraintViolations().iterator().next();
-      Assert.assertEquals("" + e.getConstraintViolations(), 1, constraintViolations.size());
-      Assert.assertEquals("", false, cv2.getInvalidValue());
-      Assert.assertEquals("", "validConfiguration", cv2.getPropertyPath().toString());
-    }
-    bean.stringField1 = "malhar3";
-
-    // annotated getter
-    try {
-      bean.getterProperty2 = null;
-      dag.validate();
-      Assert.fail("should throw ConstraintViolationException");
-    } catch (ConstraintViolationException e) {
-      ConstraintViolation<?> cv2 = e.getConstraintViolations().iterator().next();
-      Assert.assertEquals("" + e.getConstraintViolations(), 1, constraintViolations.size());
-      Assert.assertEquals("", null, cv2.getInvalidValue());
-      Assert.assertEquals("", "property2", cv2.getPropertyPath().toString());
-    }
-    bean.getterProperty2 = "";
-
-    // nested property
-    try {
-      bean.nestedBean.property = null;
-      dag.validate();
-      Assert.fail("should throw ConstraintViolationException");
-    } catch (ConstraintViolationException e) {
-      ConstraintViolation<?> cv2 = e.getConstraintViolations().iterator().next();
-      Assert.assertEquals("" + e.getConstraintViolations(), 1, constraintViolations.size());
-      Assert.assertEquals("", null, cv2.getInvalidValue());
-      Assert.assertEquals("", "nestedBean.property", cv2.getPropertyPath().toString());
-    }
-    bean.nestedBean.property = "";
-
-    // all valid
-    dag.validate();
-
-  }
-
-  @OperatorAnnotation(partitionable = false)
-  public static class TestOperatorAnnotationOperator extends BaseOperator {
-
-    @InputPortFieldAnnotation( optional = true)
-    final public transient DefaultInputPort<Object> input1 = new DefaultInputPort<Object>() {
-      @Override
-      public void process(Object tuple) {
-      }
-    };
-  }
-
-  class NoInputPortOperator extends BaseOperator {
-  }
-
-  @Test
-  public void testValidationForNonInputRootOperator() {
-    LogicalPlan dag = new LogicalPlan();
-    NoInputPortOperator x = dag.addOperator("x", new NoInputPortOperator());
-    try {
-      dag.validate();
-      Assert.fail("should fail because root operator is not input operator");
-    } catch (ValidationException e) {
-      // expected
-    }
-  }
-
-  @OperatorAnnotation(partitionable = false)
-  public static class TestOperatorAnnotationOperator2 extends BaseOperator implements Partitioner<TestOperatorAnnotationOperator2> {
-
-    @Override
-    public Collection<Partition<TestOperatorAnnotationOperator2>> definePartitions(Collection<Partition<TestOperatorAnnotationOperator2>> partitions, PartitioningContext context)
-    {
-      return null;
-    }
-
-    @Override
-    public void partitioned(Map<Integer, Partition<TestOperatorAnnotationOperator2>> partitions)
-    {
-    }
-  }
-
-  @Test
-  public void testOperatorAnnotation() {
-    LogicalPlan dag = new LogicalPlan();
-    TestGeneratorInputOperator input = dag.addOperator("input1", TestGeneratorInputOperator.class);
-    TestOperatorAnnotationOperator operator = dag.addOperator("operator1", TestOperatorAnnotationOperator.class);
-    dag.addStream("Connection", input.outport, operator.input1);
-
-
-    dag.setAttribute(operator, OperatorContext.PARTITIONER, new StatelessPartitioner<TestOperatorAnnotationOperator>(2));
-
-    try {
-      dag.validate();
-      Assert.fail("should raise operator is not partitionable for operator1");
-    } catch (ValidationException e) {
-      Assert.assertEquals("", "Operator " + dag.getMeta(operator).getName() + " provides partitioning capabilities but the annotation on the operator class declares it non partitionable!", e.getMessage());
-    }
-
-    dag.setAttribute(operator, OperatorContext.PARTITIONER, null);
-    dag.setInputPortAttribute(operator.input1, PortContext.PARTITION_PARALLEL, true);
-
-    try {
-      dag.validate();
-      Assert.fail("should raise operator is not partitionable for operator1");
-    } catch (ValidationException e) {
-      Assert.assertEquals("", "Operator " + dag.getMeta(operator).getName() + " is not partitionable but PARTITION_PARALLEL attribute is set", e.getMessage());
-    }
-
-    dag.setInputPortAttribute(operator.input1, PortContext.PARTITION_PARALLEL, false);
-    dag.validate();
-
-    dag.removeOperator(operator);
-    TestOperatorAnnotationOperator2 operator2 = dag.addOperator("operator2", TestOperatorAnnotationOperator2.class);
-
-    try {
-      dag.validate();
-      Assert.fail("should raise operator is not partitionable for operator2");
-    } catch (ValidationException e) {
-      Assert.assertEquals("Operator " + dag.getMeta(operator2).getName() + " provides partitioning capabilities but the annotation on the operator class declares it non partitionable!", e.getMessage());
-    }
-  }
-
-  @Test
-  public void testPortConnectionValidation() {
-
-    LogicalPlan dag = new LogicalPlan();
-
-    TestNonOptionalOutportInputOperator input = dag.addOperator("input1", TestNonOptionalOutportInputOperator.class);
-
-    try {
-      dag.validate();
-      Assert.fail("should raise port not connected for input1.outputPort1");
-
-    } catch (ValidationException e) {
-      Assert.assertEquals("", "Output port connection required: input1.outport1", e.getMessage());
-    }
-
-    GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
-    dag.addStream("stream1", input.outport1, o1.inport1);
-    dag.validate();
-
-    // required input
-    dag.addOperator("counter", CounterOperator.class);
-    try {
-      dag.validate();
-    } catch (ValidationException e) {
-      Assert.assertEquals("", "Input port connection required: counter.countInputPort", e.getMessage());
-    }
-
-  }
-
-  @Test
-  public void testAtMostOnceProcessingModeValidation() {
-    LogicalPlan dag = new LogicalPlan();
-
-    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
-    TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
-
-    GenericTestOperator amoOper = dag.addOperator("amoOper", GenericTestOperator.class);
-    dag.setAttribute(amoOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.AT_MOST_ONCE);
-
-    dag.addStream("input1.outport", input1.outport, amoOper.inport1);
-    dag.addStream("input2.outport", input2.outport, amoOper.inport2);
-
-    GenericTestOperator outputOper = dag.addOperator("outputOper", GenericTestOperator.class);
-    dag.setAttribute(outputOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.AT_LEAST_ONCE);
-    dag.addStream("aloOper.outport1", amoOper.outport1, outputOper.inport1);
-
-    try {
-      dag.validate();
-      Assert.fail("Exception expected for " + outputOper);
-    } catch (ValidationException ve) {
-      Assert.assertEquals("", ve.getMessage(), "Processing mode outputOper/AT_LEAST_ONCE not valid for source amoOper/AT_MOST_ONCE");
-    }
-    dag.setAttribute(outputOper, OperatorContext.PROCESSING_MODE, null);
-    dag.validate();
-
-    OperatorMeta outputOperOm = dag.getMeta(outputOper);
-    Assert.assertEquals("" + outputOperOm.getAttributes(), Operator.ProcessingMode.AT_MOST_ONCE, outputOperOm.getValue(OperatorContext.PROCESSING_MODE));
-
-  }
-
-    @Test
-  public void testExactlyOnceProcessingModeValidation() {
-    LogicalPlan dag = new LogicalPlan();
-
-    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
-    TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
-
-    GenericTestOperator amoOper = dag.addOperator("amoOper", GenericTestOperator.class);
-    dag.setAttribute(amoOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.EXACTLY_ONCE);
-
-    dag.addStream("input1.outport", input1.outport, amoOper.inport1);
-    dag.addStream("input2.outport", input2.outport, amoOper.inport2);
-
-    GenericTestOperator outputOper = dag.addOperator("outputOper", GenericTestOperator.class);
-    dag.addStream("aloOper.outport1", amoOper.outport1, outputOper.inport1);
-
-    try {
-      dag.validate();
-      Assert.fail("Exception expected for " + outputOper);
-    } catch (ValidationException ve) {
-      Assert.assertEquals("", ve.getMessage(), "Processing mode for outputOper should be AT_MOST_ONCE for source amoOper/EXACTLY_ONCE");
-    }
-
-    dag.setAttribute(outputOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.AT_LEAST_ONCE);
-
-    try {
-      dag.validate();
-      Assert.fail("Exception expected for " + outputOper);
-    } catch (ValidationException ve) {
-      Assert.assertEquals("", ve.getMessage(), "Processing mode outputOper/AT_LEAST_ONCE not valid for source amoOper/EXACTLY_ONCE");
-    }
-
-    // AT_MOST_ONCE is valid
-    dag.setAttribute(outputOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.AT_MOST_ONCE);
-    dag.validate();
-  }
-
-  @Test
-  public void testLocalityValidation() {
-    LogicalPlan dag = new LogicalPlan();
-
-    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
-    GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
-    StreamMeta s1 = dag.addStream("input1.outport", input1.outport, o1.inport1).setLocality(Locality.THREAD_LOCAL);
-    dag.validate();
-
-    TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
-    dag.addStream("input2.outport", input2.outport, o1.inport2);
-
-    try {
-      dag.validate();
-      Assert.fail("Exception expected for " + o1);
-    } catch (ValidationException ve) {
-      Assert.assertThat("", ve.getMessage(), RegexMatcher.matches("Locality THREAD_LOCAL invalid for operator .* with multiple input streams .*"));
-    }
-
-    s1.setLocality(null);
-    dag.validate();
-  }
-
-  private class TestAnnotationsOperator extends BaseOperator implements InputOperator {
-    //final public transient DefaultOutputPort<Object> outport1 = new DefaultOutputPort<Object>();
-
-    @OutputPortFieldAnnotation( optional=false)
-    final public transient DefaultOutputPort<Object> outport2 = new DefaultOutputPort<Object>();
-
-    @Override
-    public void emitTuples() {
-      // Emit Nothing
-
-    }
-  }
-
-  private class TestAnnotationsOperator2 extends BaseOperator implements InputOperator{
-    // multiple ports w/o annotation, one of them must be connected
-    final public transient DefaultOutputPort<Object> outport1 = new DefaultOutputPort<Object>();
-
-    @Override
-    public void emitTuples() {
-      // Emit Nothing
-
-    }
-  }
-
-  private class TestAnnotationsOperator3 extends BaseOperator implements InputOperator{
-    // multiple ports w/o annotation, one of them must be connected
-    @OutputPortFieldAnnotation( optional=true)
-    final public transient DefaultOutputPort<Object> outport1 = new DefaultOutputPort<Object>();
-    @OutputPortFieldAnnotation( optional=true)
-    final public transient DefaultOutputPort<Object> outport2 = new DefaultOutputPort<Object>();
-    @Override
-    public void emitTuples() {
-      // Emit Nothing
-
-    }
-  }
-
-  @Test
-  public void testOutputPortAnnotation() {
-    LogicalPlan dag = new LogicalPlan();
-    TestAnnotationsOperator ta1 = dag.addOperator("testAnnotationsOperator", new TestAnnotationsOperator());
-
-    try {
-      dag.validate();
-      Assert.fail("should raise: port connection required");
-    } catch (ValidationException e) {
-      Assert.assertEquals("", "Output port connection required: testAnnotationsOperator.outport2", e.getMessage());
-    }
-
-    TestOutputOperator o2 = dag.addOperator("sink", new TestOutputOperator());
-    dag.addStream("s1", ta1.outport2, o2.inport);
-
-    dag.validate();
-
-    TestAnnotationsOperator2 ta2 = dag.addOperator("multiOutputPorts1", new TestAnnotationsOperator2());
-
-    try {
-      dag.validate();
-      Assert.fail("should raise: At least one output port must be connected");
-    } catch (ValidationException e) {
-      Assert.assertEquals("", "At least one output port must be connected: multiOutputPorts1", e.getMessage());
-    }
-    TestOutputOperator o3 = dag.addOperator("o3", new TestOutputOperator());
-    dag.addStream("s2", ta2.outport1, o3.inport);
-
-    dag.addOperator("multiOutputPorts3", new TestAnnotationsOperator3());
-    dag.validate();
-
-  }
-
-  /**
-   * Operator that can be used with default Java serialization instead of Kryo
-   */
-  @DefaultSerializer(JavaSerializer.class)
-  public static class JdkSerializableOperator extends BaseOperator implements Serializable {
-    private static final long serialVersionUID = -4024202339520027097L;
-
-    public abstract class SerializableInputPort<T> implements InputPort<T>, Sink<T>, java.io.Serializable {
-      private static final long serialVersionUID = 1L;
-
-      @Override
-      public Sink<T> getSink() {
-        return this;
-      }
-
-      @Override
-      public void setConnected(boolean connected) {
-      }
-
-      @Override
-      public void setup(PortContext context)
-      {
-      }
-
-      @Override
-      public void teardown()
-      {
-      }
-
-      @Override
-      public StreamCodec<T> getStreamCodec() {
-        return null;
-      }
-    }
-
-    @InputPortFieldAnnotation( optional=true)
-    final public InputPort<Object> inport1 = new SerializableInputPort<Object>() {
-      private static final long serialVersionUID = 1L;
-
-      @Override
-      final public void put(Object payload)
-      {
-      }
-
-      @Override
-      public int getCount(boolean reset)
-      {
-        return 0;
-      }
-
-    };
-  }
-
-  @Test
-  public void testJdkSerializableOperator() throws Exception {
-    LogicalPlan dag = new LogicalPlan();
-    dag.addOperator("o1", new JdkSerializableOperator());
-
-    ByteArrayOutputStream outStream = new ByteArrayOutputStream();
-    LogicalPlan.write(dag, outStream);
-    outStream.close();
-
-    LogicalPlan clonedDag = LogicalPlan.read(new ByteArrayInputStream(outStream.toByteArray()));
-    JdkSerializableOperator o1Clone = (JdkSerializableOperator)clonedDag.getOperatorMeta("o1").getOperator();
-    Assert.assertNotNull("port object null", o1Clone.inport1);
-  }
-
-  private static class TestStreamCodec implements StreamCodec<Object> {
-    @Override
-    public Object fromByteArray(Slice fragment)
-    {
-      return fragment.stringValue();
-    }
-
-    @Override
-    public Slice toByteArray(Object o)
-    {
-      byte[] b = o.toString().getBytes();
-      return new Slice(b, 0, b.length);
-    }
-
-    @Override
-    public int getPartition(Object o)
-    {
-      return o.hashCode() / 2;
-    }
-  }
-
-  public static class TestPortCodecOperator extends BaseOperator {
-    public transient final DefaultInputPort<Object> inport1 = new DefaultInputPort<Object>()
-    {
-      @Override
-      public void process(Object tuple)
-      {
-
-      }
-
-      @Override
-      public StreamCodec<Object> getStreamCodec()
-      {
-        return new TestStreamCodec();
-      }
-    };
-
-    @OutputPortFieldAnnotation( optional = true)
-    public transient final DefaultOutputPort<Object> outport = new DefaultOutputPort<Object>();
-  }
-
-  /*
-  @Test
-  public void testStreamCodec() throws Exception {
-    LogicalPlan dag = new LogicalPlan();
-    TestGeneratorInputOperator input = dag.addOperator("input", TestGeneratorInputOperator.class);
-    GenericTestOperator gto1 = dag.addOperator("gto1", GenericTestOperator.class);
-    StreamMeta stream1 = dag.addStream("s1", input.outport, gto1.inport1);
-    StreamCodec<?> codec1 = new TestStreamCodec();
-    dag.setInputPortAttribute(gto1.inport1, PortContext.STREAM_CODEC, codec1);
-    dag.validate();
-    //Assert.assertEquals("Stream codec not set", stream1.getStreamCodec(), codec1);
-
-    GenericTestOperator gto2 = dag.addOperator("gto2", GenericTestOperator.class);
-    GenericTestOperator gto3 = dag.addOperator("gto3", GenericTestOperator.class);
-    StreamMeta stream2 = dag.addStream("s2", gto1.outport1, gto2.inport1, gto3.inport1);
-    dag.setInputPortAttribute(gto2.inport1, PortContext.STREAM_CODEC, codec1);
-    try {
-      dag.validate();
-    } catch (ValidationException e) {
-      String msg = e.getMessage();
-      if (!msg.startsWith("Stream codec not set on input port") || !msg.contains("gto3")
-              || !msg.contains(codec1.toString()) || !msg.endsWith("was specified on another port")) {
-        Assert.fail(String.format("LogicalPlan validation error msg: %s", msg));
-      }
-    }
-
-    dag.setInputPortAttribute(gto3.inport1, PortContext.STREAM_CODEC, codec1);
-    dag.validate();
-    //Assert.assertEquals("Stream codec not set", stream2.getStreamCodec(), codec1);
-
-    StreamCodec<?> codec2 = new TestStreamCodec();
-    dag.setInputPortAttribute(gto3.inport1, PortContext.STREAM_CODEC, codec2);
-    try {
-      dag.validate();
-    } catch (ValidationException e) {
-      String msg = e.getMessage();
-      if (!msg.startsWith("Conflicting stream codec set on input port") || !msg.contains("gto3")
-              || !msg.contains(codec2.toString()) || !msg.endsWith("was specified on another port")) {
-        Assert.fail(String.format("LogicalPlan validation error msg: %s", msg));
-      }
-    }
-
-    dag.setInputPortAttribute(gto3.inport1, PortContext.STREAM_CODEC, codec1);
-    TestPortCodecOperator pco = dag.addOperator("pco", TestPortCodecOperator.class);
-    StreamMeta stream3 = dag.addStream("s3", gto2.outport1, pco.inport1);
-    dag.validate();
-    //Assert.assertEquals("Stream codec class not set", stream3.getCodecClass(), TestStreamCodec.class);
-
-    dag.setInputPortAttribute(pco.inport1, PortContext.STREAM_CODEC, codec2);
-    dag.validate();
-    //Assert.assertEquals("Stream codec not set", stream3.getStreamCodec(), codec2);
-  }
-  */
-
-  @Test
-  public void testCheckpointableWithinAppWindowAnnotation()
-  {
-    LogicalPlan dag = new LogicalPlan();
-    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
-    GenericTestOperator x = dag.addOperator("x", new GenericTestOperator());
-    dag.addStream("Stream1", input1.outport, x.inport1);
-    dag.setAttribute(x, OperatorContext.CHECKPOINT_WINDOW_COUNT, 15);
-    dag.setAttribute(x, OperatorContext.APPLICATION_WINDOW_COUNT, 30);
-    dag.validate();
-
-    TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
-    CheckpointableWithinAppWindowOperator y = dag.addOperator("y", new CheckpointableWithinAppWindowOperator());
-    dag.addStream("Stream2", input2.outport, y.inport1);
-    dag.setAttribute(y, OperatorContext.CHECKPOINT_WINDOW_COUNT, 15);
-    dag.setAttribute(y, OperatorContext.APPLICATION_WINDOW_COUNT, 30);
-    dag.validate();
-
-    TestGeneratorInputOperator input3 = dag.addOperator("input3", TestGeneratorInputOperator.class);
-    NotCheckpointableWithinAppWindowOperator z = dag.addOperator("z", new NotCheckpointableWithinAppWindowOperator());
-    dag.addStream("Stream3", input3.outport, z.inport1);
-    dag.setAttribute(z, OperatorContext.CHECKPOINT_WINDOW_COUNT, 15);
-    dag.setAttribute(z, OperatorContext.APPLICATION_WINDOW_COUNT, 30);
-    try {
-      dag.validate();
-      Assert.fail("should fail because chekpoint window count is not a factor of application window count");
-    }
-    catch (ValidationException e) {
-      // expected
-    }
-
-    dag.setAttribute(z, OperatorContext.CHECKPOINT_WINDOW_COUNT, 30);
-    dag.validate();
-
-    dag.setAttribute(z, OperatorContext.CHECKPOINT_WINDOW_COUNT, 45);
-    try {
-      dag.validate();
-      Assert.fail("should fail because chekpoint window count is not a factor of application window count");
-    }
-    catch (ValidationException e) {
-      // expected
-    }
-  }
-
-  @OperatorAnnotation(checkpointableWithinAppWindow = true)
-  class CheckpointableWithinAppWindowOperator extends GenericTestOperator
-  {
-  }
-
-  @OperatorAnnotation(checkpointableWithinAppWindow = false)
-  class NotCheckpointableWithinAppWindowOperator extends GenericTestOperator
-  {
-  }
-
-  @Test
-  public void testInputPortHiding()
-  {
-    LogicalPlan dag = new LogicalPlan();
-    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
-    Operator2 operator2 = dag.addOperator("operator2", new Operator2());
-    dag.addStream("Stream1", input1.outport, operator2.input);
-    dag.validate();
-  }
-
-  @Test
-  public void testInvalidInputPortConnection()
-  {
-    LogicalPlan dag = new LogicalPlan();
-    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
-    Operator1 operator1 = dag.addOperator("operator3", new Operator3());
-    dag.addStream("Stream1", input1.outport, operator1.input);
-    try {
-      dag.validate();
-    } catch (ValidationException ex) {
-      Assert.assertTrue("validation message", ex.getMessage().startsWith("Invalid port connected"));
-      return;
-    }
-    Assert.fail();
-  }
-
-  class Operator1 extends BaseOperator
-  {
-    public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
-    {
-      @Override
-      public void process(Object tuple)
-      {
-
-      }
-    };
-  }
-
-  class Operator2 extends Operator1
-  {
-    public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
-    {
-      @Override
-      public void process(Object tuple)
-      {
-
-      }
-    };
-  }
-
-  class Operator3 extends Operator1
-  {
-    @InputPortFieldAnnotation(optional = true)
-    public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
-    {
-      @Override
-      public void process(Object tuple)
-      {
-
-      }
-    };
-  }
-
-  @Test
-  public void testOutputPortHiding()
-  {
-    LogicalPlan dag = new LogicalPlan();
-    Operator5 operator5 = dag.addOperator("input", new Operator5());
-    Operator2 operator2 = dag.addOperator("operator2", new Operator2());
-    dag.addStream("Stream1", operator5.output, operator2.input);
-    dag.validate();
-  }
-
-  @Test(expected = ValidationException.class)
-  public void testInvalidOutputPortConnection()
-  {
-    LogicalPlan dag = new LogicalPlan();
-    Operator4 operator4 = dag.addOperator("input", new Operator5());
-    Operator3 operator3 = dag.addOperator("operator3", new Operator3());
-    dag.addStream("Stream1", operator4.output, operator3.input);
-    dag.validate();
-  }
-
-  class Operator4 extends BaseOperator implements InputOperator
-  {
-    public final transient DefaultOutputPort<Object> output = new DefaultOutputPort<>();
-
-    @Override
-    public void emitTuples()
-    {
-
-    }
-  }
-
-  class Operator5 extends Operator4
-  {
-    public final transient DefaultOutputPort<Object> output = new DefaultOutputPort<>();
-  }
-
-  /*
-  These were tests for operator semantics that verified if an operator class implements InputOperator then the same class should not declare input ports.
-  This would be done later when we are able to verify user code at compile-time.
-
-    validation()
-  {
-    if (n.getOperator() instanceof InputOperator) {
-      try {
-        for (Class<?> clazz : n.getOperator().getClass().getInterfaces()) {
-          if (clazz.getName().equals(InputOperator.class.getName())) {
-            for (Field field : n.getOperator().getClass().getDeclaredFields()) {
-              field.setAccessible(true);
-              Object declaredObject = field.get(n.getOperator());
-              if (declaredObject instanceof InputPort) {
-                throw new ValidationException("Operator class implements InputOperator and also declares input ports: " + n.name);
-              }
-            }
-            break;
-          }
-        }
-      }
-      catch (IllegalAccessException e) {
-        throw new RuntimeException(e);
-      }
-    }
-  }
-  @Test
-  public void testInvalidInputOperatorDeclaration()
-  {
-    LogicalPlan dag = new LogicalPlan();
-
-    TestGeneratorInputOperator.InvalidInputOperator inputOperator = dag.addOperator("input", new TestGeneratorInputOperator.InvalidInputOperator());
-    GenericTestOperator operator2 = dag.addOperator("operator2", GenericTestOperator.class);
-
-    dag.addStream("stream1", inputOperator.outport, operator2.inport1);
-
-    try {
-      dag.validate();
-      fail("validation should fail");
-    }
-    catch (ValidationException e) {
-      // expected
-    }
-  }
-
-  @Test
-  public void testValidInputOperatorDeclaration()
-  {
-    LogicalPlan dag = new LogicalPlan();
-
-    TestGeneratorInputOperator.ValidGenericOperator operator1 = dag.addOperator("input", new TestGeneratorInputOperator.ValidGenericOperator());
-    GenericTestOperator operator2 = dag.addOperator("operator2", GenericTestOperator.class);
-
-    dag.addStream("stream1", operator1.outport, operator2.inport1);
-    dag.validate();
-  }
-  */
-}



[10/50] [abbrv] incubator-apex-core git commit: Merge branch 'davidyan74-APEX-101' into devel-3

Posted by vr...@apache.org.
Merge branch 'davidyan74-APEX-101' into devel-3


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/9d83a444
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/9d83a444
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/9d83a444

Branch: refs/heads/feature-module
Commit: 9d83a44457ab7c30c4f8d521eec24b82780b1851
Parents: b57972b b82f905
Author: Gaurav Gupta <ga...@apache.org>
Authored: Thu Sep 10 14:42:39 2015 -0700
Committer: Gaurav Gupta <ga...@apache.org>
Committed: Thu Sep 10 14:42:39 2015 -0700

----------------------------------------------------------------------
 .../com/datatorrent/stram/StreamingContainerManager.java     | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------



[20/50] [abbrv] incubator-apex-core git commit: Fixed version reference

Posted by vr...@apache.org.
Fixed version reference


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/f296129c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/f296129c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/f296129c

Branch: refs/heads/feature-module
Commit: f296129c34032ecaf5c456ffd047389b57d1f93e
Parents: 8aae573
Author: Aniruddha Thombre <an...@aniruddhas.com>
Authored: Mon Sep 14 18:35:44 2015 +0530
Committer: Aniruddha Thombre <an...@aniruddhas.com>
Committed: Mon Sep 14 18:35:44 2015 +0530

----------------------------------------------------------------------
 bufferserver/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/f296129c/bufferserver/pom.xml
----------------------------------------------------------------------
diff --git a/bufferserver/pom.xml b/bufferserver/pom.xml
index 5d169ae..b09db32 100644
--- a/bufferserver/pom.xml
+++ b/bufferserver/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.2.0-SNAPSHOT</version>
+    <version>${project.version}</version>
   </parent>
 
   <artifactId>dt-bufferserver</artifactId>


[25/50] [abbrv] incubator-apex-core git commit: Merge branch 'devel-3' of https://github.com/aniruddhas/incubator-apex-core into devel-3

Posted by vr...@apache.org.
Merge branch 'devel-3' of https://github.com/aniruddhas/incubator-apex-core into devel-3


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/eead2d17
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/eead2d17
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/eead2d17

Branch: refs/heads/feature-module
Commit: eead2d178500437098ad70a95c591f4fa7ac1f2a
Parents: 5e1d541 9aa8514
Author: Thomas Weise <th...@datatorrent.com>
Authored: Mon Sep 14 13:01:13 2015 -0700
Committer: Thomas Weise <th...@datatorrent.com>
Committed: Mon Sep 14 13:01:13 2015 -0700

----------------------------------------------------------------------
 CHANGELOG.md                                        | 16 ++++++++++++++++
 bufferserver/pom.xml                                |  2 +-
 .../common/util/AsyncFSStorageAgent.java            |  5 +++++
 3 files changed, 22 insertions(+), 1 deletion(-)
----------------------------------------------------------------------



[44/50] [abbrv] incubator-apex-core git commit: Merge branch 'APEX-120' of https://github.com/gauravgopi123/incubator-apex-core into devel-3

Posted by vr...@apache.org.
Merge branch 'APEX-120' of https://github.com/gauravgopi123/incubator-apex-core into devel-3


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/d2f73e31
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/d2f73e31
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/d2f73e31

Branch: refs/heads/feature-module
Commit: d2f73e3124e409979d1aca5900c5883a28482a5b
Parents: 041beb9 2c081b4
Author: Vlad Rozov <v....@datatorrent.com>
Authored: Thu Sep 17 16:42:59 2015 -0700
Committer: Vlad Rozov <v....@datatorrent.com>
Committed: Thu Sep 17 16:42:59 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/common/util/AsyncFSStorageAgent.java   | 5 ++---
 .../com/datatorrent/common/util/AsyncFSStorageAgentTest.java    | 1 -
 2 files changed, 2 insertions(+), 4 deletions(-)
----------------------------------------------------------------------



[06/50] [abbrv] incubator-apex-core git commit: Merge branch 'devel-3' of https://github.com/chandnisingh/incubator-apex-core into devel-3

Posted by vr...@apache.org.
Merge branch 'devel-3' of https://github.com/chandnisingh/incubator-apex-core into devel-3


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/065ddbef
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/065ddbef
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/065ddbef

Branch: refs/heads/feature-module
Commit: 065ddbef4d11a9179853341e9394183bac201a2b
Parents: 8eb81f7 39d5d31
Author: David Yan <da...@datatorrent.com>
Authored: Wed Sep 9 17:06:36 2015 -0700
Committer: David Yan <da...@datatorrent.com>
Committed: Wed Sep 9 17:06:36 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/plan/logical/Operators.java   | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)
----------------------------------------------------------------------



[35/50] [abbrv] incubator-apex-core git commit: APEX-28 #resolve

Posted by vr...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/977093e1/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanTest.java b/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanTest.java
new file mode 100644
index 0000000..94dce6c
--- /dev/null
+++ b/engine/src/test/java/com/datatorrent/stram/plan/logical/LogicalPlanTest.java
@@ -0,0 +1,988 @@
+/**
+ * Copyright (C) 2015 DataTorrent, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *         http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.datatorrent.stram.plan.logical;
+
+import com.datatorrent.common.util.BaseOperator;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.Serializable;
+import java.util.*;
+
+import javax.validation.*;
+import javax.validation.constraints.AssertTrue;
+import javax.validation.constraints.Min;
+import javax.validation.constraints.NotNull;
+import javax.validation.constraints.Pattern;
+
+import com.esotericsoftware.kryo.DefaultSerializer;
+import com.esotericsoftware.kryo.serializers.JavaSerializer;
+import com.google.common.collect.Maps;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+import com.datatorrent.common.partitioner.StatelessPartitioner;
+import com.datatorrent.api.*;
+import com.datatorrent.api.Context.OperatorContext;
+import com.datatorrent.api.Context.PortContext;
+import com.datatorrent.api.DAG.Locality;
+import com.datatorrent.api.annotation.InputPortFieldAnnotation;
+import com.datatorrent.api.annotation.OperatorAnnotation;
+import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
+import com.datatorrent.netlet.util.Slice;
+import com.datatorrent.stram.engine.GenericTestOperator;
+import com.datatorrent.stram.engine.TestGeneratorInputOperator;
+import com.datatorrent.stram.engine.TestNonOptionalOutportInputOperator;
+import com.datatorrent.stram.engine.TestOutputOperator;
+import com.datatorrent.stram.plan.logical.LogicalPlan;
+import com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta;
+import com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta;
+import com.datatorrent.stram.support.StramTestSupport.MemoryStorageAgent;
+import com.datatorrent.stram.support.StramTestSupport.RegexMatcher;
+
+public class LogicalPlanTest {
+
+  @Test
+  public void testCycleDetection() {
+     LogicalPlan dag = new LogicalPlan();
+
+     //NodeConf operator1 = b.getOrAddNode("operator1");
+     GenericTestOperator operator2 = dag.addOperator("operator2", GenericTestOperator.class);
+     GenericTestOperator operator3 = dag.addOperator("operator3", GenericTestOperator.class);
+     GenericTestOperator operator4 = dag.addOperator("operator4", GenericTestOperator.class);
+     //NodeConf operator5 = b.getOrAddNode("operator5");
+     //NodeConf operator6 = b.getOrAddNode("operator6");
+     GenericTestOperator operator7 = dag.addOperator("operator7", GenericTestOperator.class);
+
+     // strongly connect n2-n3-n4-n2
+     dag.addStream("n2n3", operator2.outport1, operator3.inport1);
+
+     dag.addStream("n3n4", operator3.outport1, operator4.inport1);
+
+     dag.addStream("n4n2", operator4.outport1, operator2.inport1);
+
+     // self referencing operator cycle
+     StreamMeta n7n7 = dag.addStream("n7n7", operator7.outport1, operator7.inport1);
+     try {
+       n7n7.addSink(operator7.inport1);
+       fail("cannot add to stream again");
+     } catch (Exception e) {
+       // expected, stream can have single input/output only
+     }
+
+     List<List<String>> cycles = new ArrayList<List<String>>();
+     dag.findStronglyConnected(dag.getMeta(operator7), cycles);
+     assertEquals("operator self reference", 1, cycles.size());
+     assertEquals("operator self reference", 1, cycles.get(0).size());
+     assertEquals("operator self reference", dag.getMeta(operator7).getName(), cycles.get(0).get(0));
+
+     // 3 operator cycle
+     cycles.clear();
+     dag.findStronglyConnected(dag.getMeta(operator4), cycles);
+     assertEquals("3 operator cycle", 1, cycles.size());
+     assertEquals("3 operator cycle", 3, cycles.get(0).size());
+     assertTrue("operator2", cycles.get(0).contains(dag.getMeta(operator2).getName()));
+     assertTrue("operator3", cycles.get(0).contains(dag.getMeta(operator3).getName()));
+     assertTrue("operator4", cycles.get(0).contains(dag.getMeta(operator4).getName()));
+
+     try {
+       dag.validate();
+       fail("validation should fail");
+     } catch (ValidationException e) {
+       // expected
+     }
+
+  }
+
+  public static class ValidationOperator extends BaseOperator {
+    public final transient DefaultOutputPort<Object> goodOutputPort = new DefaultOutputPort<Object>();
+
+    public final transient DefaultOutputPort<Object> badOutputPort = new DefaultOutputPort<Object>();
+  }
+
+  public static class CounterOperator extends BaseOperator {
+    final public transient InputPort<Object> countInputPort = new DefaultInputPort<Object>() {
+      @Override
+      final public void process(Object payload) {
+      }
+    };
+  }
+
+  @Test
+  public void testLogicalPlanSerialization() throws Exception {
+
+    LogicalPlan dag = new LogicalPlan();
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, new MemoryStorageAgent());
+
+    ValidationOperator validationNode = dag.addOperator("validationNode", ValidationOperator.class);
+    CounterOperator countGoodNode = dag.addOperator("countGoodNode", CounterOperator.class);
+    CounterOperator countBadNode = dag.addOperator("countBadNode", CounterOperator.class);
+    //ConsoleOutputOperator echoBadNode = dag.addOperator("echoBadNode", ConsoleOutputOperator.class);
+
+    // good tuples to counter operator
+    dag.addStream("goodTuplesStream", validationNode.goodOutputPort, countGoodNode.countInputPort);
+
+    // bad tuples to separate stream and echo operator
+    // (stream with 2 outputs)
+    dag.addStream("badTuplesStream", validationNode.badOutputPort, countBadNode.countInputPort);
+
+    Assert.assertEquals("number root operators", 1, dag.getRootOperators().size());
+    Assert.assertEquals("root operator id", "validationNode", dag.getRootOperators().get(0).getName());
+
+    dag.getContextAttributes(countGoodNode).put(OperatorContext.SPIN_MILLIS, 10);
+
+    ByteArrayOutputStream bos = new ByteArrayOutputStream();
+    LogicalPlan.write(dag, bos);
+
+    // System.out.println("serialized size: " + bos.toByteArray().length);
+
+    ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
+    LogicalPlan dagClone = LogicalPlan.read(bis);
+    Assert.assertNotNull(dagClone);
+    Assert.assertEquals("number operators in clone", dag.getAllOperators().size(), dagClone.getAllOperators().size());
+    Assert.assertEquals("number root operators in clone", 1, dagClone.getRootOperators().size());
+    Assert.assertTrue("root operator in operators", dagClone.getAllOperators().contains(dagClone.getRootOperators().get(0)));
+
+
+    Operator countGoodNodeClone = dagClone.getOperatorMeta("countGoodNode").getOperator();
+    Assert.assertEquals("", new Integer(10), dagClone.getContextAttributes(countGoodNodeClone).get(OperatorContext.SPIN_MILLIS));
+
+  }
+
+  @Test
+  public void testDeleteOperator()
+  {
+    LogicalPlan dag = new LogicalPlan();
+    TestGeneratorInputOperator input = dag.addOperator("input1", TestGeneratorInputOperator.class);
+    GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
+    GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
+    dag.addStream("s0", input.outport, o1.inport1);
+    StreamMeta s1 = dag.addStream("s1", o1.outport1, o2.inport1);
+    dag.validate();
+    Assert.assertEquals("", 3, dag.getAllOperators().size());
+
+    dag.removeOperator(o2);
+    s1.remove();
+    dag.validate();
+    Assert.assertEquals("", 2, dag.getAllOperators().size());
+  }
+
+  public static class ValidationTestOperator extends BaseOperator implements InputOperator {
+    @NotNull
+    @Pattern(regexp=".*malhar.*", message="Value has to contain 'malhar'!")
+    private String stringField1;
+
+    @Min(2)
+    private int intField1;
+
+    @AssertTrue(message="stringField1 should end with intField1")
+    private boolean isValidConfiguration() {
+      return stringField1.endsWith(String.valueOf(intField1));
+    }
+
+    private String getterProperty2 = "";
+
+    @NotNull
+    public String getProperty2() {
+      return getterProperty2;
+    }
+
+    public void setProperty2(String s) {
+      // annotations need to be on the getter
+      getterProperty2 = s;
+    }
+
+    private String[] stringArrayField;
+
+    public String[] getStringArrayField() {
+      return stringArrayField;
+    }
+
+    public void setStringArrayField(String[] stringArrayField) {
+      this.stringArrayField = stringArrayField;
+    }
+
+    public class Nested {
+      @NotNull
+      private String property = "";
+
+      public String getProperty() {
+        return property;
+      }
+
+      public void setProperty(String property) {
+        this.property = property;
+      }
+
+    }
+
+    @Valid
+    private final Nested nestedBean = new Nested();
+
+    private String stringProperty2;
+
+    public String getStringProperty2() {
+      return stringProperty2;
+    }
+
+    public void setStringProperty2(String stringProperty2) {
+      this.stringProperty2 = stringProperty2;
+    }
+
+    private Map<String, String> mapProperty = Maps.newHashMap();
+
+    public Map<String, String> getMapProperty()
+    {
+      return mapProperty;
+    }
+
+    public void setMapProperty(Map<String, String> mapProperty)
+    {
+      this.mapProperty = mapProperty;
+    }
+
+    @Override
+    public void emitTuples() {
+      // Emit no tuples
+
+    }
+
+  }
+
+  @Test
+  public void testOperatorValidation() {
+
+    ValidationTestOperator bean = new ValidationTestOperator();
+    bean.stringField1 = "malhar1";
+    bean.intField1 = 1;
+
+    // ensure validation standalone produces expected results
+    ValidatorFactory factory =
+        Validation.buildDefaultValidatorFactory();
+    Validator validator = factory.getValidator();
+    Set<ConstraintViolation<ValidationTestOperator>> constraintViolations =
+             validator.validate(bean);
+    //for (ConstraintViolation<ValidationTestOperator> cv : constraintViolations) {
+    //  System.out.println("validation error: " + cv);
+    //}
+    Assert.assertEquals("" + constraintViolations,1, constraintViolations.size());
+    ConstraintViolation<ValidationTestOperator> cv = constraintViolations.iterator().next();
+    Assert.assertEquals("", bean.intField1, cv.getInvalidValue());
+    Assert.assertEquals("", "intField1", cv.getPropertyPath().toString());
+
+    // ensure DAG validation produces matching results
+    LogicalPlan dag = new LogicalPlan();
+    bean = dag.addOperator("testOperator", bean);
+
+    try {
+      dag.validate();
+      Assert.fail("should throw ConstraintViolationException");
+    } catch (ConstraintViolationException e) {
+      Assert.assertEquals("violation details", constraintViolations, e.getConstraintViolations());
+      String expRegex = ".*ValidationTestOperator\\{name=null}, propertyPath='intField1', message='must be greater than or equal to 2',.*value=1}]";
+      Assert.assertThat("exception message", e.getMessage(), RegexMatcher.matches(expRegex));
+    }
+
+    try {
+      bean.intField1 = 3;
+      dag.validate();
+      Assert.fail("should throw ConstraintViolationException");
+    } catch (ConstraintViolationException e) {
+      ConstraintViolation<?> cv2 = e.getConstraintViolations().iterator().next();
+      Assert.assertEquals("" + e.getConstraintViolations(), 1, constraintViolations.size());
+      Assert.assertEquals("", false, cv2.getInvalidValue());
+      Assert.assertEquals("", "validConfiguration", cv2.getPropertyPath().toString());
+    }
+    bean.stringField1 = "malhar3";
+
+    // annotated getter
+    try {
+      bean.getterProperty2 = null;
+      dag.validate();
+      Assert.fail("should throw ConstraintViolationException");
+    } catch (ConstraintViolationException e) {
+      ConstraintViolation<?> cv2 = e.getConstraintViolations().iterator().next();
+      Assert.assertEquals("" + e.getConstraintViolations(), 1, constraintViolations.size());
+      Assert.assertEquals("", null, cv2.getInvalidValue());
+      Assert.assertEquals("", "property2", cv2.getPropertyPath().toString());
+    }
+    bean.getterProperty2 = "";
+
+    // nested property
+    try {
+      bean.nestedBean.property = null;
+      dag.validate();
+      Assert.fail("should throw ConstraintViolationException");
+    } catch (ConstraintViolationException e) {
+      ConstraintViolation<?> cv2 = e.getConstraintViolations().iterator().next();
+      Assert.assertEquals("" + e.getConstraintViolations(), 1, constraintViolations.size());
+      Assert.assertEquals("", null, cv2.getInvalidValue());
+      Assert.assertEquals("", "nestedBean.property", cv2.getPropertyPath().toString());
+    }
+    bean.nestedBean.property = "";
+
+    // all valid
+    dag.validate();
+
+  }
+
+  @OperatorAnnotation(partitionable = false)
+  public static class TestOperatorAnnotationOperator extends BaseOperator {
+
+    @InputPortFieldAnnotation( optional = true)
+    final public transient DefaultInputPort<Object> input1 = new DefaultInputPort<Object>() {
+      @Override
+      public void process(Object tuple) {
+      }
+    };
+  }
+
+  class NoInputPortOperator extends BaseOperator {
+  }
+
+  @Test
+  public void testValidationForNonInputRootOperator() {
+    LogicalPlan dag = new LogicalPlan();
+    NoInputPortOperator x = dag.addOperator("x", new NoInputPortOperator());
+    try {
+      dag.validate();
+      Assert.fail("should fail because root operator is not input operator");
+    } catch (ValidationException e) {
+      // expected
+    }
+  }
+
+  @OperatorAnnotation(partitionable = false)
+  public static class TestOperatorAnnotationOperator2 extends BaseOperator implements Partitioner<TestOperatorAnnotationOperator2> {
+
+    @Override
+    public Collection<Partition<TestOperatorAnnotationOperator2>> definePartitions(Collection<Partition<TestOperatorAnnotationOperator2>> partitions, PartitioningContext context)
+    {
+      return null;
+    }
+
+    @Override
+    public void partitioned(Map<Integer, Partition<TestOperatorAnnotationOperator2>> partitions)
+    {
+    }
+  }
+
+  @Test
+  public void testOperatorAnnotation() {
+    LogicalPlan dag = new LogicalPlan();
+    TestGeneratorInputOperator input = dag.addOperator("input1", TestGeneratorInputOperator.class);
+    TestOperatorAnnotationOperator operator = dag.addOperator("operator1", TestOperatorAnnotationOperator.class);
+    dag.addStream("Connection", input.outport, operator.input1);
+
+
+    dag.setAttribute(operator, OperatorContext.PARTITIONER, new StatelessPartitioner<TestOperatorAnnotationOperator>(2));
+
+    try {
+      dag.validate();
+      Assert.fail("should raise operator is not partitionable for operator1");
+    } catch (ValidationException e) {
+      Assert.assertEquals("", "Operator " + dag.getMeta(operator).getName() + " provides partitioning capabilities but the annotation on the operator class declares it non partitionable!", e.getMessage());
+    }
+
+    dag.setAttribute(operator, OperatorContext.PARTITIONER, null);
+    dag.setInputPortAttribute(operator.input1, PortContext.PARTITION_PARALLEL, true);
+
+    try {
+      dag.validate();
+      Assert.fail("should raise operator is not partitionable for operator1");
+    } catch (ValidationException e) {
+      Assert.assertEquals("", "Operator " + dag.getMeta(operator).getName() + " is not partitionable but PARTITION_PARALLEL attribute is set", e.getMessage());
+    }
+
+    dag.setInputPortAttribute(operator.input1, PortContext.PARTITION_PARALLEL, false);
+    dag.validate();
+
+    dag.removeOperator(operator);
+    TestOperatorAnnotationOperator2 operator2 = dag.addOperator("operator2", TestOperatorAnnotationOperator2.class);
+
+    try {
+      dag.validate();
+      Assert.fail("should raise operator is not partitionable for operator2");
+    } catch (ValidationException e) {
+      Assert.assertEquals("Operator " + dag.getMeta(operator2).getName() + " provides partitioning capabilities but the annotation on the operator class declares it non partitionable!", e.getMessage());
+    }
+  }
+
+  @Test
+  public void testPortConnectionValidation() {
+
+    LogicalPlan dag = new LogicalPlan();
+
+    TestNonOptionalOutportInputOperator input = dag.addOperator("input1", TestNonOptionalOutportInputOperator.class);
+
+    try {
+      dag.validate();
+      Assert.fail("should raise port not connected for input1.outputPort1");
+
+    } catch (ValidationException e) {
+      Assert.assertEquals("", "Output port connection required: input1.outport1", e.getMessage());
+    }
+
+    GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
+    dag.addStream("stream1", input.outport1, o1.inport1);
+    dag.validate();
+
+    // required input
+    dag.addOperator("counter", CounterOperator.class);
+    try {
+      dag.validate();
+    } catch (ValidationException e) {
+      Assert.assertEquals("", "Input port connection required: counter.countInputPort", e.getMessage());
+    }
+
+  }
+
+  @Test
+  public void testAtMostOnceProcessingModeValidation() {
+    LogicalPlan dag = new LogicalPlan();
+
+    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
+    TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
+
+    GenericTestOperator amoOper = dag.addOperator("amoOper", GenericTestOperator.class);
+    dag.setAttribute(amoOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.AT_MOST_ONCE);
+
+    dag.addStream("input1.outport", input1.outport, amoOper.inport1);
+    dag.addStream("input2.outport", input2.outport, amoOper.inport2);
+
+    GenericTestOperator outputOper = dag.addOperator("outputOper", GenericTestOperator.class);
+    dag.setAttribute(outputOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.AT_LEAST_ONCE);
+    dag.addStream("aloOper.outport1", amoOper.outport1, outputOper.inport1);
+
+    try {
+      dag.validate();
+      Assert.fail("Exception expected for " + outputOper);
+    } catch (ValidationException ve) {
+      Assert.assertEquals("", ve.getMessage(), "Processing mode outputOper/AT_LEAST_ONCE not valid for source amoOper/AT_MOST_ONCE");
+    }
+    dag.setAttribute(outputOper, OperatorContext.PROCESSING_MODE, null);
+    dag.validate();
+
+    OperatorMeta outputOperOm = dag.getMeta(outputOper);
+    Assert.assertEquals("" + outputOperOm.getAttributes(), Operator.ProcessingMode.AT_MOST_ONCE, outputOperOm.getValue(OperatorContext.PROCESSING_MODE));
+
+  }
+
+    @Test
+  public void testExactlyOnceProcessingModeValidation() {
+    LogicalPlan dag = new LogicalPlan();
+
+    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
+    TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
+
+    GenericTestOperator amoOper = dag.addOperator("amoOper", GenericTestOperator.class);
+    dag.setAttribute(amoOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.EXACTLY_ONCE);
+
+    dag.addStream("input1.outport", input1.outport, amoOper.inport1);
+    dag.addStream("input2.outport", input2.outport, amoOper.inport2);
+
+    GenericTestOperator outputOper = dag.addOperator("outputOper", GenericTestOperator.class);
+    dag.addStream("aloOper.outport1", amoOper.outport1, outputOper.inport1);
+
+    try {
+      dag.validate();
+      Assert.fail("Exception expected for " + outputOper);
+    } catch (ValidationException ve) {
+      Assert.assertEquals("", ve.getMessage(), "Processing mode for outputOper should be AT_MOST_ONCE for source amoOper/EXACTLY_ONCE");
+    }
+
+    dag.setAttribute(outputOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.AT_LEAST_ONCE);
+
+    try {
+      dag.validate();
+      Assert.fail("Exception expected for " + outputOper);
+    } catch (ValidationException ve) {
+      Assert.assertEquals("", ve.getMessage(), "Processing mode outputOper/AT_LEAST_ONCE not valid for source amoOper/EXACTLY_ONCE");
+    }
+
+    // AT_MOST_ONCE is valid
+    dag.setAttribute(outputOper, OperatorContext.PROCESSING_MODE, Operator.ProcessingMode.AT_MOST_ONCE);
+    dag.validate();
+  }
+
+  @Test
+  public void testLocalityValidation() {
+    LogicalPlan dag = new LogicalPlan();
+
+    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
+    GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
+    StreamMeta s1 = dag.addStream("input1.outport", input1.outport, o1.inport1).setLocality(Locality.THREAD_LOCAL);
+    dag.validate();
+
+    TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
+    dag.addStream("input2.outport", input2.outport, o1.inport2);
+
+    try {
+      dag.validate();
+      Assert.fail("Exception expected for " + o1);
+    } catch (ValidationException ve) {
+      Assert.assertThat("", ve.getMessage(), RegexMatcher.matches("Locality THREAD_LOCAL invalid for operator .* with multiple input streams .*"));
+    }
+
+    s1.setLocality(null);
+    dag.validate();
+  }
+
+  private class TestAnnotationsOperator extends BaseOperator implements InputOperator {
+    //final public transient DefaultOutputPort<Object> outport1 = new DefaultOutputPort<Object>();
+
+    @OutputPortFieldAnnotation( optional=false)
+    final public transient DefaultOutputPort<Object> outport2 = new DefaultOutputPort<Object>();
+
+    @Override
+    public void emitTuples() {
+      // Emit Nothing
+
+    }
+  }
+
+  private class TestAnnotationsOperator2 extends BaseOperator implements InputOperator{
+    // multiple ports w/o annotation, one of them must be connected
+    final public transient DefaultOutputPort<Object> outport1 = new DefaultOutputPort<Object>();
+
+    @Override
+    public void emitTuples() {
+      // Emit Nothing
+
+    }
+  }
+
+  private class TestAnnotationsOperator3 extends BaseOperator implements InputOperator{
+    // multiple ports w/o annotation, one of them must be connected
+    @OutputPortFieldAnnotation( optional=true)
+    final public transient DefaultOutputPort<Object> outport1 = new DefaultOutputPort<Object>();
+    @OutputPortFieldAnnotation( optional=true)
+    final public transient DefaultOutputPort<Object> outport2 = new DefaultOutputPort<Object>();
+    @Override
+    public void emitTuples() {
+      // Emit Nothing
+
+    }
+  }
+
+  @Test
+  public void testOutputPortAnnotation() {
+    LogicalPlan dag = new LogicalPlan();
+    TestAnnotationsOperator ta1 = dag.addOperator("testAnnotationsOperator", new TestAnnotationsOperator());
+
+    try {
+      dag.validate();
+      Assert.fail("should raise: port connection required");
+    } catch (ValidationException e) {
+      Assert.assertEquals("", "Output port connection required: testAnnotationsOperator.outport2", e.getMessage());
+    }
+
+    TestOutputOperator o2 = dag.addOperator("sink", new TestOutputOperator());
+    dag.addStream("s1", ta1.outport2, o2.inport);
+
+    dag.validate();
+
+    TestAnnotationsOperator2 ta2 = dag.addOperator("multiOutputPorts1", new TestAnnotationsOperator2());
+
+    try {
+      dag.validate();
+      Assert.fail("should raise: At least one output port must be connected");
+    } catch (ValidationException e) {
+      Assert.assertEquals("", "At least one output port must be connected: multiOutputPorts1", e.getMessage());
+    }
+    TestOutputOperator o3 = dag.addOperator("o3", new TestOutputOperator());
+    dag.addStream("s2", ta2.outport1, o3.inport);
+
+    dag.addOperator("multiOutputPorts3", new TestAnnotationsOperator3());
+    dag.validate();
+
+  }
+
+  /**
+   * Operator that can be used with default Java serialization instead of Kryo
+   */
+  @DefaultSerializer(JavaSerializer.class)
+  public static class JdkSerializableOperator extends BaseOperator implements Serializable {
+    private static final long serialVersionUID = -4024202339520027097L;
+
+    public abstract class SerializableInputPort<T> implements InputPort<T>, Sink<T>, java.io.Serializable {
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      public Sink<T> getSink() {
+        return this;
+      }
+
+      @Override
+      public void setConnected(boolean connected) {
+      }
+
+      @Override
+      public void setup(PortContext context)
+      {
+      }
+
+      @Override
+      public void teardown()
+      {
+      }
+
+      @Override
+      public StreamCodec<T> getStreamCodec() {
+        return null;
+      }
+    }
+
+    @InputPortFieldAnnotation( optional=true)
+    final public InputPort<Object> inport1 = new SerializableInputPort<Object>() {
+      private static final long serialVersionUID = 1L;
+
+      @Override
+      final public void put(Object payload)
+      {
+      }
+
+      @Override
+      public int getCount(boolean reset)
+      {
+        return 0;
+      }
+
+    };
+  }
+
+  @Test
+  public void testJdkSerializableOperator() throws Exception {
+    LogicalPlan dag = new LogicalPlan();
+    dag.addOperator("o1", new JdkSerializableOperator());
+
+    ByteArrayOutputStream outStream = new ByteArrayOutputStream();
+    LogicalPlan.write(dag, outStream);
+    outStream.close();
+
+    LogicalPlan clonedDag = LogicalPlan.read(new ByteArrayInputStream(outStream.toByteArray()));
+    JdkSerializableOperator o1Clone = (JdkSerializableOperator)clonedDag.getOperatorMeta("o1").getOperator();
+    Assert.assertNotNull("port object null", o1Clone.inport1);
+  }
+
+  private static class TestStreamCodec implements StreamCodec<Object> {
+    @Override
+    public Object fromByteArray(Slice fragment)
+    {
+      return fragment.stringValue();
+    }
+
+    @Override
+    public Slice toByteArray(Object o)
+    {
+      byte[] b = o.toString().getBytes();
+      return new Slice(b, 0, b.length);
+    }
+
+    @Override
+    public int getPartition(Object o)
+    {
+      return o.hashCode() / 2;
+    }
+  }
+
+  public static class TestPortCodecOperator extends BaseOperator {
+    public transient final DefaultInputPort<Object> inport1 = new DefaultInputPort<Object>()
+    {
+      @Override
+      public void process(Object tuple)
+      {
+
+      }
+
+      @Override
+      public StreamCodec<Object> getStreamCodec()
+      {
+        return new TestStreamCodec();
+      }
+    };
+
+    @OutputPortFieldAnnotation( optional = true)
+    public transient final DefaultOutputPort<Object> outport = new DefaultOutputPort<Object>();
+  }
+
+  /*
+  @Test
+  public void testStreamCodec() throws Exception {
+    LogicalPlan dag = new LogicalPlan();
+    TestGeneratorInputOperator input = dag.addOperator("input", TestGeneratorInputOperator.class);
+    GenericTestOperator gto1 = dag.addOperator("gto1", GenericTestOperator.class);
+    StreamMeta stream1 = dag.addStream("s1", input.outport, gto1.inport1);
+    StreamCodec<?> codec1 = new TestStreamCodec();
+    dag.setInputPortAttribute(gto1.inport1, PortContext.STREAM_CODEC, codec1);
+    dag.validate();
+    //Assert.assertEquals("Stream codec not set", stream1.getStreamCodec(), codec1);
+
+    GenericTestOperator gto2 = dag.addOperator("gto2", GenericTestOperator.class);
+    GenericTestOperator gto3 = dag.addOperator("gto3", GenericTestOperator.class);
+    StreamMeta stream2 = dag.addStream("s2", gto1.outport1, gto2.inport1, gto3.inport1);
+    dag.setInputPortAttribute(gto2.inport1, PortContext.STREAM_CODEC, codec1);
+    try {
+      dag.validate();
+    } catch (ValidationException e) {
+      String msg = e.getMessage();
+      if (!msg.startsWith("Stream codec not set on input port") || !msg.contains("gto3")
+              || !msg.contains(codec1.toString()) || !msg.endsWith("was specified on another port")) {
+        Assert.fail(String.format("LogicalPlan validation error msg: %s", msg));
+      }
+    }
+
+    dag.setInputPortAttribute(gto3.inport1, PortContext.STREAM_CODEC, codec1);
+    dag.validate();
+    //Assert.assertEquals("Stream codec not set", stream2.getStreamCodec(), codec1);
+
+    StreamCodec<?> codec2 = new TestStreamCodec();
+    dag.setInputPortAttribute(gto3.inport1, PortContext.STREAM_CODEC, codec2);
+    try {
+      dag.validate();
+    } catch (ValidationException e) {
+      String msg = e.getMessage();
+      if (!msg.startsWith("Conflicting stream codec set on input port") || !msg.contains("gto3")
+              || !msg.contains(codec2.toString()) || !msg.endsWith("was specified on another port")) {
+        Assert.fail(String.format("LogicalPlan validation error msg: %s", msg));
+      }
+    }
+
+    dag.setInputPortAttribute(gto3.inport1, PortContext.STREAM_CODEC, codec1);
+    TestPortCodecOperator pco = dag.addOperator("pco", TestPortCodecOperator.class);
+    StreamMeta stream3 = dag.addStream("s3", gto2.outport1, pco.inport1);
+    dag.validate();
+    //Assert.assertEquals("Stream codec class not set", stream3.getCodecClass(), TestStreamCodec.class);
+
+    dag.setInputPortAttribute(pco.inport1, PortContext.STREAM_CODEC, codec2);
+    dag.validate();
+    //Assert.assertEquals("Stream codec not set", stream3.getStreamCodec(), codec2);
+  }
+  */
+
+  @Test
+  public void testCheckpointableWithinAppWindowAnnotation()
+  {
+    LogicalPlan dag = new LogicalPlan();
+    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
+    GenericTestOperator x = dag.addOperator("x", new GenericTestOperator());
+    dag.addStream("Stream1", input1.outport, x.inport1);
+    dag.setAttribute(x, OperatorContext.CHECKPOINT_WINDOW_COUNT, 15);
+    dag.setAttribute(x, OperatorContext.APPLICATION_WINDOW_COUNT, 30);
+    dag.validate();
+
+    TestGeneratorInputOperator input2 = dag.addOperator("input2", TestGeneratorInputOperator.class);
+    CheckpointableWithinAppWindowOperator y = dag.addOperator("y", new CheckpointableWithinAppWindowOperator());
+    dag.addStream("Stream2", input2.outport, y.inport1);
+    dag.setAttribute(y, OperatorContext.CHECKPOINT_WINDOW_COUNT, 15);
+    dag.setAttribute(y, OperatorContext.APPLICATION_WINDOW_COUNT, 30);
+    dag.validate();
+
+    TestGeneratorInputOperator input3 = dag.addOperator("input3", TestGeneratorInputOperator.class);
+    NotCheckpointableWithinAppWindowOperator z = dag.addOperator("z", new NotCheckpointableWithinAppWindowOperator());
+    dag.addStream("Stream3", input3.outport, z.inport1);
+    dag.setAttribute(z, OperatorContext.CHECKPOINT_WINDOW_COUNT, 15);
+    dag.setAttribute(z, OperatorContext.APPLICATION_WINDOW_COUNT, 30);
+    try {
+      dag.validate();
+      Assert.fail("should fail because chekpoint window count is not a factor of application window count");
+    }
+    catch (ValidationException e) {
+      // expected
+    }
+
+    dag.setAttribute(z, OperatorContext.CHECKPOINT_WINDOW_COUNT, 30);
+    dag.validate();
+
+    dag.setAttribute(z, OperatorContext.CHECKPOINT_WINDOW_COUNT, 45);
+    try {
+      dag.validate();
+      Assert.fail("should fail because chekpoint window count is not a factor of application window count");
+    }
+    catch (ValidationException e) {
+      // expected
+    }
+  }
+
+  @OperatorAnnotation(checkpointableWithinAppWindow = true)
+  class CheckpointableWithinAppWindowOperator extends GenericTestOperator
+  {
+  }
+
+  @OperatorAnnotation(checkpointableWithinAppWindow = false)
+  class NotCheckpointableWithinAppWindowOperator extends GenericTestOperator
+  {
+  }
+
+  @Test
+  public void testInputPortHiding()
+  {
+    LogicalPlan dag = new LogicalPlan();
+    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
+    Operator2 operator2 = dag.addOperator("operator2", new Operator2());
+    dag.addStream("Stream1", input1.outport, operator2.input);
+    dag.validate();
+  }
+
+  @Test
+  public void testInvalidInputPortConnection()
+  {
+    LogicalPlan dag = new LogicalPlan();
+    TestGeneratorInputOperator input1 = dag.addOperator("input1", TestGeneratorInputOperator.class);
+    Operator1 operator1 = dag.addOperator("operator3", new Operator3());
+    dag.addStream("Stream1", input1.outport, operator1.input);
+    try {
+      dag.validate();
+    } catch (ValidationException ex) {
+      Assert.assertTrue("validation message", ex.getMessage().startsWith("Invalid port connected"));
+      return;
+    }
+    Assert.fail();
+  }
+
+  class Operator1 extends BaseOperator
+  {
+    public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
+    {
+      @Override
+      public void process(Object tuple)
+      {
+
+      }
+    };
+  }
+
+  class Operator2 extends Operator1
+  {
+    public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
+    {
+      @Override
+      public void process(Object tuple)
+      {
+
+      }
+    };
+  }
+
+  class Operator3 extends Operator1
+  {
+    @InputPortFieldAnnotation(optional = true)
+    public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
+    {
+      @Override
+      public void process(Object tuple)
+      {
+
+      }
+    };
+  }
+
+  @Test
+  public void testOutputPortHiding()
+  {
+    LogicalPlan dag = new LogicalPlan();
+    Operator5 operator5 = dag.addOperator("input", new Operator5());
+    Operator2 operator2 = dag.addOperator("operator2", new Operator2());
+    dag.addStream("Stream1", operator5.output, operator2.input);
+    dag.validate();
+  }
+
+  @Test(expected = ValidationException.class)
+  public void testInvalidOutputPortConnection()
+  {
+    LogicalPlan dag = new LogicalPlan();
+    Operator4 operator4 = dag.addOperator("input", new Operator5());
+    Operator3 operator3 = dag.addOperator("operator3", new Operator3());
+    dag.addStream("Stream1", operator4.output, operator3.input);
+    dag.validate();
+  }
+
+  class Operator4 extends BaseOperator implements InputOperator
+  {
+    public final transient DefaultOutputPort<Object> output = new DefaultOutputPort<>();
+
+    @Override
+    public void emitTuples()
+    {
+
+    }
+  }
+
+  class Operator5 extends Operator4
+  {
+    public final transient DefaultOutputPort<Object> output = new DefaultOutputPort<>();
+  }
+
+  /*
+  These were tests for operator semantics that verified if an operator class implements InputOperator then the same class should not declare input ports.
+  This would be done later when we are able to verify user code at compile-time.
+
+    validation()
+  {
+    if (n.getOperator() instanceof InputOperator) {
+      try {
+        for (Class<?> clazz : n.getOperator().getClass().getInterfaces()) {
+          if (clazz.getName().equals(InputOperator.class.getName())) {
+            for (Field field : n.getOperator().getClass().getDeclaredFields()) {
+              field.setAccessible(true);
+              Object declaredObject = field.get(n.getOperator());
+              if (declaredObject instanceof InputPort) {
+                throw new ValidationException("Operator class implements InputOperator and also declares input ports: " + n.name);
+              }
+            }
+            break;
+          }
+        }
+      }
+      catch (IllegalAccessException e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+  @Test
+  public void testInvalidInputOperatorDeclaration()
+  {
+    LogicalPlan dag = new LogicalPlan();
+
+    TestGeneratorInputOperator.InvalidInputOperator inputOperator = dag.addOperator("input", new TestGeneratorInputOperator.InvalidInputOperator());
+    GenericTestOperator operator2 = dag.addOperator("operator2", GenericTestOperator.class);
+
+    dag.addStream("stream1", inputOperator.outport, operator2.inport1);
+
+    try {
+      dag.validate();
+      fail("validation should fail");
+    }
+    catch (ValidationException e) {
+      // expected
+    }
+  }
+
+  @Test
+  public void testValidInputOperatorDeclaration()
+  {
+    LogicalPlan dag = new LogicalPlan();
+
+    TestGeneratorInputOperator.ValidGenericOperator operator1 = dag.addOperator("input", new TestGeneratorInputOperator.ValidGenericOperator());
+    GenericTestOperator operator2 = dag.addOperator("operator2", GenericTestOperator.class);
+
+    dag.addStream("stream1", operator1.outport, operator2.inport1);
+    dag.validate();
+  }
+  */
+}

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/977093e1/engine/src/test/resources/schemaTestTopology.json
----------------------------------------------------------------------
diff --git a/engine/src/test/resources/schemaTestTopology.json b/engine/src/test/resources/schemaTestTopology.json
index 6c779fd..46c0e78 100644
--- a/engine/src/test/resources/schemaTestTopology.json
+++ b/engine/src/test/resources/schemaTestTopology.json
@@ -36,7 +36,7 @@
         }
       ],
       "schema": {
-        "class": "com.datatorrent.stram.plan.LogicalPlanConfigurationTest$TestSchema"
+        "class": "com.datatorrent.stram.plan.logical.LogicalPlanConfigurationTest$TestSchema"
       }
     }
   ]



[26/50] [abbrv] incubator-apex-core git commit: APEX-124 #resolve

Posted by vr...@apache.org.
APEX-124 #resolve


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/91e63b54
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/91e63b54
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/91e63b54

Branch: refs/heads/feature-module
Commit: 91e63b5448ecbfb210e9311449c1d756cdbf0862
Parents: eead2d1
Author: Gaurav <ga...@datatorrent.com>
Authored: Mon Sep 14 15:30:26 2015 -0700
Committer: Gaurav <ga...@datatorrent.com>
Committed: Mon Sep 14 15:30:26 2015 -0700

----------------------------------------------------------------------
 pom.xml | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/91e63b54/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 7825b81..484798b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -134,6 +134,9 @@
         </dependencies>
         <configuration>
           <excludedGroups>${test.excludedGroups}</excludedGroups>
+          <systemPropertyVariables>
+            <java.io.tmpdir>${project.build.directory}</java.io.tmpdir>
+          </systemPropertyVariables>
         </configuration>
       </plugin>
       <plugin>
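
The surefire setting above points java.io.tmpdir at the module's build directory, so temporary files created by tests land under target/ and are removed by mvn clean. A minimal JUnit 4 sketch of a test relying on that property (class and method names are invented for illustration, not part of this commit):

  import java.io.File;
  import java.io.IOException;

  import org.junit.Assert;
  import org.junit.Test;

  public class TmpDirLocationTest
  {
    @Test
    public void tempFilesLandUnderConfiguredTmpDir() throws IOException
    {
      // File.createTempFile resolves against java.io.tmpdir, which the
      // surefire configuration above sets to ${project.build.directory}.
      File tmp = File.createTempFile("apex-test", ".tmp");
      try {
        File expected = new File(System.getProperty("java.io.tmpdir")).getCanonicalFile();
        Assert.assertEquals(expected, tmp.getParentFile().getCanonicalFile());
      } finally {
        Assert.assertTrue(tmp.delete());
      }
    }
  }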


[39/50] [abbrv] incubator-apex-core git commit: APEX-28 #resolve

Posted by vr...@apache.org.
APEX-28 #resolve

 - Rename of files requires a separate commit to preserve attribution.
 - Improved documentation
 - Added unit test to make sure that attributes declared in multiple contexts have the same type.


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/977093e1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/977093e1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/977093e1

Branch: refs/heads/feature-module
Commit: 977093e171f1183985ae80d42b0d6dbc3af6cbc5
Parents: 434a717
Author: Timothy Farkas <ti...@datatorrent.com>
Authored: Tue Aug 25 18:03:08 2015 -0700
Committer: Timothy Farkas <ti...@datatorrent.com>
Committed: Wed Sep 16 15:31:44 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/api/Attribute.java     |   11 +-
 .../main/java/com/datatorrent/api/Context.java  |   10 -
 .../stram/plan/logical/LogicalPlan.java         |    6 +-
 .../plan/logical/LogicalPlanConfiguration.java  |  472 +++--
 .../plan/LogicalPlanConfigurationTest.java      | 1788 ------------------
 .../datatorrent/stram/plan/LogicalPlanTest.java |  990 ----------
 .../logical/LogicalPlanConfigurationTest.java   | 1511 +++++++++++++++
 .../stram/plan/logical/LogicalPlanTest.java     |  988 ++++++++++
 .../src/test/resources/schemaTestTopology.json  |    2 +-
 9 files changed, 2785 insertions(+), 2993 deletions(-)
----------------------------------------------------------------------
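
The last bullet in the commit message refers to a unit test ensuring that an attribute name declared in more than one context class keeps a single type. A rough reflection-based sketch of that kind of check, written against assumptions (that the relevant context interfaces are the types nested in Context and that attributes are declared as Attribute fields) rather than the actual test added in this commit:

  import java.lang.reflect.Field;
  import java.lang.reflect.Type;
  import java.util.HashMap;
  import java.util.Map;

  import org.junit.Assert;
  import org.junit.Test;

  import com.datatorrent.api.Attribute;
  import com.datatorrent.api.Context;

  public class AttributeTypeConsistencySketch
  {
    @Test
    public void sameAttributeNameImpliesSameType()
    {
      Map<String, Type> nameToType = new HashMap<String, Type>();
      for (Class<?> contextClass : Context.class.getDeclaredClasses()) {
        for (Field field : contextClass.getDeclaredFields()) {
          if (!Attribute.class.isAssignableFrom(field.getType())) {
            continue;
          }
          // e.g. Attribute<Integer>; the same simple name declared in another
          // context must resolve to the same parameterized type.
          Type type = field.getGenericType();
          Type previous = nameToType.put(field.getName(), type);
          if (previous != null) {
            Assert.assertEquals("attribute " + field.getName(), previous, type);
          }
        }
      }
    }
  }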


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/977093e1/api/src/main/java/com/datatorrent/api/Attribute.java
----------------------------------------------------------------------
diff --git a/api/src/main/java/com/datatorrent/api/Attribute.java b/api/src/main/java/com/datatorrent/api/Attribute.java
index 4c16a2a..a7492b5 100644
--- a/api/src/main/java/com/datatorrent/api/Attribute.java
+++ b/api/src/main/java/com/datatorrent/api/Attribute.java
@@ -277,13 +277,6 @@ public class Attribute<T> implements Serializable
         if (map.containsKey(clazz)) {
           return 0;
         }
-
-        map.put(clazz, getAttributesNoSave(clazz));
-        return (long)clazz.getModifiers() << 32 | clazz.hashCode();
-      }
-
-      public static Set<Attribute<Object>> getAttributesNoSave(Class<?> clazz)
-      {
         Set<Attribute<Object>> set = new HashSet<Attribute<Object>>();
         try {
           for (Field f: clazz.getDeclaredFields()) {
@@ -330,8 +323,8 @@ public class Attribute<T> implements Serializable
         catch (Exception ex) {
           DTThrowable.rethrow(ex);
         }
-
-        return set;
+        map.put(clazz, set);
+        return (long)clazz.getModifiers() << 32 | clazz.hashCode();
       }
 
     }
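
The registration id built at the end of this hunk packs the class modifiers and the class hash code into a single long. As a generic illustration of that bit-packing pattern (a standalone helper written for this note, not code from the commit), the low int is typically masked so a negative value does not sign-extend into the upper half:

  public final class LongKeys
  {
    private LongKeys()
    {
    }

    // Pack two 32-bit ints into one long: 'high' occupies the upper 32 bits,
    // 'low' the lower 32 bits. The 0xFFFFFFFFL mask keeps a negative 'low'
    // from sign-extending over the upper half.
    public static long pack(int high, int low)
    {
      return ((long)high << 32) | (low & 0xFFFFFFFFL);
    }

    public static int high(long key)
    {
      return (int)(key >>> 32);
    }

    public static int low(long key)
    {
      return (int)key;
    }

    public static void main(String[] args)
    {
      long key = pack(42, -7);
      System.out.println(high(key) + " " + low(key)); // prints "42 -7"
    }
  }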

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/977093e1/api/src/main/java/com/datatorrent/api/Context.java
----------------------------------------------------------------------
diff --git a/api/src/main/java/com/datatorrent/api/Context.java b/api/src/main/java/com/datatorrent/api/Context.java
index c2d974a..cd10398 100644
--- a/api/src/main/java/com/datatorrent/api/Context.java
+++ b/api/src/main/java/com/datatorrent/api/Context.java
@@ -33,16 +33,6 @@ import com.datatorrent.api.annotation.Stateless;
  */
 public interface Context
 {
-  /*
-   * Note: If the same name is given to an Attribute specified in multiple Context classes, then the type of that
-   * Attribute is required to be the same accross all Context classes. This is required because if a simple attribute
-   * name is specified in a properties file at the top level context then that attribute needs to be set in all child configurations. If
-   * there were multiple Attributes specified in different Contexts with the same name, but a different type, then
-   * it would not be possible to set the values of Attributes specified by a simple attribute name in the root
-   * context of a properties file. If this were the case, then adding another Attribute with the same name as a pre-existing Attribute to a new Context
-   * class would be a backwards incompatible change.
-   */
-
   /**
    * Get the attributes associated with this context.
    * The returned map does not contain any attributes that may have been defined in the parent context of this context.
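
The note removed from Context above captures a real constraint: when a properties file sets a simple attribute name at the top-level context, that value is pushed down into every child context, which only works if the name maps to one type everywhere. A hypothetical pair of context interfaces showing the rule (the names and the use of Attribute's default-value constructor are assumptions for illustration):

  import com.datatorrent.api.Attribute;
  import com.datatorrent.api.Context;

  interface ExampleContexts
  {
    interface FirstContext extends Context
    {
      // Declared as Attribute<Integer> here ...
      Attribute<Integer> TIMEOUT_MILLIS = new Attribute<Integer>(30000);
    }

    interface SecondContext extends Context
    {
      // ... so it must also be Attribute<Integer> here. Declaring it as, say,
      // Attribute<Long> would make a root-level "TIMEOUT_MILLIS=60000" entry
      // in a properties file ambiguous when applied to both contexts.
      Attribute<Integer> TIMEOUT_MILLIS = new Attribute<Integer>(30000);
    }
  }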

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/977093e1/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
index 8826896..94d18ba 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
@@ -1088,7 +1088,7 @@ public class LogicalPlan implements Serializable, DAG
       if (e.getKey().getOperatorWrapper() == om) {
          stream.sinks.remove(e.getKey());
       }
-      // If persistStream was enabled for stream, reset stream when sink removed 
+      // If persistStream was enabled for stream, reset stream when sink removed
       stream.resetStreamPersistanceOnSinkRemoval(e.getKey());
     }
     this.operators.remove(om.getName());
@@ -1431,11 +1431,11 @@ public class LogicalPlan implements Serializable, DAG
 
     for (StreamMeta s: streams.values()) {
       if (s.source == null) {
-        throw new ValidationException(String.format("stream source not connected: %s", s.getName()));
+        throw new ValidationException("Stream source not connected: " + s.getName());
       }
 
       if (s.sinks.isEmpty()) {
-        throw new ValidationException(String.format("stream sink not connected: %s", s.getName()));
+        throw new ValidationException("Stream sink not connected: " + s.getName());
       }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/977093e1/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
index a3a18c2..7a53cd7 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
@@ -15,6 +15,7 @@
  */
 package com.datatorrent.stram.plan.logical;
 
+
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -22,14 +23,17 @@ import java.io.Serializable;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Field;
 import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+
 import java.util.*;
 import java.util.Map.Entry;
 
-import jline.internal.Preconditions;
 
 import javax.validation.ValidationException;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
@@ -61,7 +65,6 @@ import com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta;
 import com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta;
 import com.datatorrent.stram.plan.logical.LogicalPlan.OutputPortMeta;
 import com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta;
-import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration.StramElement;
 import com.datatorrent.stram.util.ObjectMapperFactory;
 
 /**
@@ -159,43 +162,16 @@ public class LogicalPlanConfiguration {
    */
   protected enum ConfElement
   {
-    @SuppressWarnings("SetReplaceableByEnumSet")
-    STRAM(null,
-          null,
-          new HashSet<StramElement>(),
-          null),
-    @SuppressWarnings("SetReplaceableByEnumSet")
-    APPLICATION(StramElement.APPLICATION,
-                STRAM,
-                new HashSet<StramElement>(),
-                DAGContext.class),
-    @SuppressWarnings("SetReplaceableByEnumSet")
-    TEMPLATE(StramElement.TEMPLATE,
-             STRAM,
-             new HashSet<StramElement>(),
-             null),
-    @SuppressWarnings("SetReplaceableByEnumSet")
-    GATEWAY(StramElement.GATEWAY,
-            ConfElement.APPLICATION,
-            new HashSet<StramElement>(),
-            null),
-    @SuppressWarnings("SetReplaceableByEnumSet")
-    OPERATOR(StramElement.OPERATOR,
-             ConfElement.APPLICATION,
-             new HashSet<StramElement>(),
-             OperatorContext.class),
-    @SuppressWarnings("SetReplaceableByEnumSet")
-    STREAM(StramElement.STREAM,
-           ConfElement.APPLICATION,
-           new HashSet<StramElement>(),
-           null),
-    PORT(StramElement.PORT,
-         ConfElement.OPERATOR,
-         Sets.newHashSet(StramElement.INPUT_PORT, StramElement.OUTPUT_PORT),
-         PortContext.class);
-
-    public static final Map<StramElement, ConfElement> STRAM_ELEMENT_TO_CONF_ELEMENT = Maps.newHashMap();
-    public static final Map<Class<? extends Context>, ConfElement> CONTEXT_TO_CONF_ELEMENT = Maps.newHashMap();
+    STRAM(null, null, null, null),
+    APPLICATION(StramElement.APPLICATION, STRAM, null, DAGContext.class),
+    TEMPLATE(StramElement.TEMPLATE, STRAM, null, null),
+    GATEWAY(StramElement.GATEWAY, ConfElement.APPLICATION, null, null),
+    OPERATOR(StramElement.OPERATOR, ConfElement.APPLICATION, null, OperatorContext.class),
+    STREAM(StramElement.STREAM, ConfElement.APPLICATION, null, null),
+    PORT(StramElement.PORT, ConfElement.OPERATOR, EnumSet.of(StramElement.INPUT_PORT, StramElement.OUTPUT_PORT), PortContext.class);
+
+    protected static final Map<StramElement, ConfElement> STRAM_ELEMENT_TO_CONF_ELEMENT = Maps.newHashMap();
+    protected static final Map<Class<? extends Context>, ConfElement> CONTEXT_TO_CONF_ELEMENT = Maps.newHashMap();
 
     static {
       initialize();
@@ -246,12 +222,8 @@ public class LogicalPlanConfiguration {
       }
 
       if (!ContextUtils.CONTEXT_CLASSES.equals(confElementContextClasses)) {
-        throw new IllegalStateException("All the context classes "
-                                        + ContextUtils.CONTEXT_CLASSES
-                                        + " found in "
-                                        + Context.class
-                                        + " are not used by ConfElements "
-                                        + confElementContextClasses);
+        throw new IllegalStateException("All the context classes " + ContextUtils.CONTEXT_CLASSES + " found in "
+                                        + Context.class + " are not used by ConfElements " + confElementContextClasses);
       }
     }
 
@@ -312,16 +284,15 @@ public class LogicalPlanConfiguration {
       this.element = element;
       this.parent = parent;
 
-      this.allRelatedElements.addAll(additionalRelatedElements);
+      if (additionalRelatedElements != null) {
+        this.allRelatedElements.addAll(additionalRelatedElements);
+      }
+
       this.allRelatedElements.add(element);
 
       this.contextClass = contextClass;
 
-      if (contextClass != null) {
-        this.contextAttributes = ContextUtils.CONTEXT_CLASS_TO_ATTRIBUTES.get(contextClass);
-      } else {
-        this.contextAttributes = Sets.newHashSet();
-      }
+      this.contextAttributes = contextClass != null ? ContextUtils.CONTEXT_CLASS_TO_ATTRIBUTES.get(contextClass) : new HashSet<String>();
     }
 
     private void setAllChildAttributes(Set<String> allChildAttributes)
@@ -445,8 +416,7 @@ public class LogicalPlanConfiguration {
      *
      * @param conf The current {@link Conf} type.
      * @return A path from the current {@link Conf} type to a root {@link Conf} type, which includes the current and root
-     * {
-     * @lin Conf} types.
+     * {@link Conf} types.
      */
     public static List<StramElement> getPathFromChildToRootInclusive(StramElement conf)
     {
@@ -471,8 +441,7 @@ public class LogicalPlanConfiguration {
      *
      * @param conf The current {@link Conf} type.
      * @return A path from the root {@link Conf} type to the current {@link Conf} type, which includes the current and root
-     * {
-     * @lin Conf} types.
+     * {@link Conf} types.
      */
     public static List<StramElement> getPathFromRootToChildInclusive(StramElement conf)
     {
@@ -487,11 +456,9 @@ public class LogicalPlanConfiguration {
      * @param child The current {@link Conf} type.
      * @param parent The parent {@link Conf} type.
      * @return A path from the current {@link Conf} type to a parent {@link Conf} type, which includes the current and parent
-     * {
-     * @lin Conf} types.
+     * {@link Conf} types.
      */
-    public static List<StramElement> getPathFromChildToParentInclusive(StramElement child,
-                                                                       StramElement parent)
+    public static List<StramElement> getPathFromChildToParentInclusive(StramElement child, StramElement parent)
     {
       ConfElement confElement = STRAM_ELEMENT_TO_CONF_ELEMENT.get(child);
 
@@ -528,11 +495,9 @@ public class LogicalPlanConfiguration {
      * @param child The current {@link Conf} type.
      * @param parent The parent {@link Conf} type.
      * @return A path from the parent {@link Conf} type to the current {@link Conf} type, which includes the current and parent
-     * {
-     * @lin Conf} types.
+     * {@link Conf} types.
      */
-    public static List<StramElement> getPathFromParentToChildInclusive(StramElement child,
-                                                                       StramElement parent)
+    public static List<StramElement> getPathFromParentToChildInclusive(StramElement child, StramElement parent)
     {
       List<StramElement> path = getPathFromChildToParentInclusive(child,
                                                                   parent);
@@ -548,8 +513,7 @@ public class LogicalPlanConfiguration {
      * @return The {@link ConfElement} that contains the given attribute, or null if no {@link ConfElement} contains
      * the given attribute.
      */
-    public static ConfElement findConfElementWithAttribute(ConfElement current,
-                                                           String simpleAttributeName)
+    public static ConfElement findConfElementWithAttribute(ConfElement current, String simpleAttributeName)
     {
       if (current.getContextAttributes().contains(simpleAttributeName)) {
         return current;
@@ -573,9 +537,7 @@ public class LogicalPlanConfiguration {
       List<StramElement> path = ConfElement.getPathFromParentToChildInclusive(childConfElement.getStramElement(),
                                                                               parentConf.getConfElement().getStramElement());
 
-      for (int pathIndex = 1;
-           pathIndex < path.size();
-           pathIndex++) {
+      for (int pathIndex = 1; pathIndex < path.size(); pathIndex++) {
         LOG.debug("Adding conf");
         StramElement pathElement = path.get(pathIndex);
         //Add the configurations we need to hold this attribute
@@ -593,12 +555,19 @@ public class LogicalPlanConfiguration {
   @SuppressWarnings("unchecked")
   protected static class ContextUtils
   {
-    public static final Map<Class<? extends Context>, Set<String>> CONTEXT_CLASS_TO_ATTRIBUTES;
-    public static final Set<Class<? extends Context>> CONTEXT_CLASSES;
-    public static final Map<Class<? extends Context>, Map<String, Attribute<?>>> CONTEXT_TO_ATTRIBUTE_NAME_TO_ATTRIBUTE;
+    private static final Map<String, Type> ATTRIBUTES_TO_TYPE = Maps.newHashMap();
+    public static final Map<Class<? extends Context>, Set<String>> CONTEXT_CLASS_TO_ATTRIBUTES = Maps.newHashMap();
+    public static final Set<Class<? extends Context>> CONTEXT_CLASSES = Sets.newHashSet();
+    public static final Map<Class<? extends Context>, Map<String, Attribute<?>>> CONTEXT_TO_ATTRIBUTE_NAME_TO_ATTRIBUTE = Maps.newHashMap();
 
     static {
-      CONTEXT_CLASSES = Sets.newHashSet();
+      initialize();
+    }
+
+    @VisibleForTesting
+    protected static void initialize()
+    {
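+      // Rebuilds the static lookup tables from scratch (hence the clear() calls below); exposed so tests can re-run it.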
+      CONTEXT_CLASSES.clear();
 
       for (Class<?> clazz: Context.class.getDeclaredClasses()) {
         if (!Context.class.isAssignableFrom(clazz)) {
@@ -608,9 +577,17 @@ public class LogicalPlanConfiguration {
         CONTEXT_CLASSES.add((Class<? extends Context>)clazz);
       }
 
-      CONTEXT_CLASS_TO_ATTRIBUTES = Maps.newHashMap();
+      buildAttributeMaps(CONTEXT_CLASSES);
+    }
 
-      for (Class<? extends Context> contextClass: CONTEXT_CLASSES) {
+    @VisibleForTesting
+    protected static void buildAttributeMaps(Set<Class<? extends Context>> contextClasses)
+    {
+      CONTEXT_CLASS_TO_ATTRIBUTES.clear();
+      CONTEXT_TO_ATTRIBUTE_NAME_TO_ATTRIBUTE.clear();
+      ATTRIBUTES_TO_TYPE.clear();
+
+      for (Class<? extends Context> contextClass: contextClasses) {
         Set<String> contextAttributes = Sets.newHashSet();
 
         Field[] fields = contextClass.getDeclaredFields();
@@ -620,19 +597,29 @@ public class LogicalPlanConfiguration {
             continue;
           }
 
+          Type fieldType = ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[0];
           contextAttributes.add(field.getName());
+
+          Type existingType = ATTRIBUTES_TO_TYPE.get(field.getName());
+
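+          // Attributes can be referenced by their simple name alone, which may match fields declared in several
+          // Context classes; that shortcut only works if every field sharing a simple name also shares one type.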
+          if (existingType != null && !existingType.equals(fieldType)) {
+            throw new ValidationException("The attribute " + field.getName() +
+                                          " is defined with two different types in two different context classes: " +
+                                          fieldType + " and " + existingType + "\n" +
+                                          "Attributes with the same name are required to have the same type across all Context classes.");
+          }
+
+          ATTRIBUTES_TO_TYPE.put(field.getName(), fieldType);
         }
 
         CONTEXT_CLASS_TO_ATTRIBUTES.put(contextClass, contextAttributes);
       }
 
-      CONTEXT_TO_ATTRIBUTE_NAME_TO_ATTRIBUTE = Maps.newHashMap();
-
-      for (Class<? extends Context> contextClass: CONTEXT_CLASSES) {
+      for (Class<? extends Context> contextClass: contextClasses) {
         Map<String, Attribute<?>> simpleAttributeNameToAttribute = Maps.newHashMap();
         CONTEXT_TO_ATTRIBUTE_NAME_TO_ATTRIBUTE.put(contextClass, simpleAttributeNameToAttribute);
 
-        Set<Attribute<Object>> attributes = AttributeInitializer.getAttributesNoSave(contextClass);
+        Set<Attribute<Object>> attributes = AttributeInitializer.getAttributes(contextClass);
 
         LOG.debug("context class {} and attributes {}", contextClass, attributes);
 
@@ -644,6 +631,7 @@ public class LogicalPlanConfiguration {
 
     private ContextUtils()
     {
+      //Private constructor to prevent instantiation of this utility class
     }
 
     /**
@@ -735,6 +723,7 @@ public class LogicalPlanConfiguration {
 
     private AttributeParseUtils()
     {
+      //Private constructor to prevent instantiation of this utility class
     }
 
     /**
@@ -782,11 +771,7 @@ public class LogicalPlanConfiguration {
     {
 
       if (element != null && element != StramElement.ATTR) {
-        throw new IllegalArgumentException("The given "
-                                           + StramElement.class
-                                           + " must either have a value of null or "
-                                           + StramElement.ATTR
-                                           + " but it had a value of " + element);
+        throw new IllegalArgumentException("The given " + StramElement.class + " must either have a value of null or " + StramElement.ATTR + " but it had a value of " + element);
       }
 
       String attributeName;
@@ -823,9 +808,7 @@ public class LogicalPlanConfiguration {
     public static Class<? extends Context> getContainingContextClass(String attributeName)
     {
       if (isSimpleAttributeName(attributeName)) {
-        throw new IllegalArgumentException("The given attribute name "
-                                           + attributeName
-                                           + " is simple.");
+        throw new IllegalArgumentException("The given attribute name " + attributeName + " is simple.");
       }
 
       LOG.debug("Attribute Name {}", attributeName);
@@ -847,9 +830,7 @@ public class LogicalPlanConfiguration {
         if (Context.class.isAssignableFrom(clazz)) {
           contextClass = (Class<? extends Context>)clazz;
         } else {
-          throw new IllegalArgumentException("The provided context class name "
-                                             + contextClassName
-                                             + " is not valid.");
+          throw new IllegalArgumentException("The provided context class name " + contextClassName + " is not valid.");
         }
       } catch (ClassNotFoundException ex) {
         throw new IllegalArgumentException(ex);
@@ -858,9 +839,7 @@ public class LogicalPlanConfiguration {
       String simpleAttributeName = getSimpleAttributeName(attributeName);
 
       if (!ContextUtils.CONTEXT_CLASS_TO_ATTRIBUTES.get(contextClass).contains(simpleAttributeName)) {
-        throw new ValidationException(simpleAttributeName
-                                      + " is not a valid attribute of "
-                                      + contextClass);
+        throw new ValidationException(simpleAttributeName + " is not a valid attribute of " + contextClass);
       }
 
       return contextClass;
@@ -879,9 +858,7 @@ public class LogicalPlanConfiguration {
       }
 
       if (attributeName.endsWith(KEY_SEPARATOR)) {
-        throw new IllegalArgumentException("The given attribute name ends with \""
-                                           + KEY_SEPARATOR
-                                           + "\" so a simple name cannot be extracted.");
+        throw new IllegalArgumentException("The given attribute name ends with \"" + KEY_SEPARATOR + "\" so a simple name cannot be extracted.");
       }
 
       return attributeName.substring(attributeName.lastIndexOf(KEY_SEPARATOR) + 1, attributeName.length());
@@ -961,6 +938,13 @@ public class LogicalPlanConfiguration {
       return (T)parentConf;
     }
 
+    /**
+     * Gets an ancestor {@link Conf} of this {@link Conf} of the given {@link StramElement} type.
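+     *
+     * <p>A minimal usage sketch (the variable names here are illustrative, not part of this class): starting from an
+     * operator-level {@link Conf}, the enclosing application-level {@link Conf} could be looked up as
+     * <pre>{@code
+     * Conf appConf = operatorConf.getAncestorConf(StramElement.APPLICATION);
+     * }</pre>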
+     * @param <T> The {@link Conf} class of the ancestor {@link Conf}.
+     * @param ancestorElement The {@link StramElement} representing the type of the ancestor {@link Conf}.
+     * @return The ancestor {@link Conf} of the corresponding {@link StramElement} type, or null if no ancestor {@link Conf} with
+     * the given {@link StramElement} type exists.
+     */
     @SuppressWarnings("unchecked")
     public <T extends Conf> T getAncestorConf(StramElement ancestorElement) {
       if (getConfElement().getStramElement() == ancestorElement) {
@@ -973,6 +957,16 @@ public class LogicalPlanConfiguration {
       }
     }
 
+    /**
+     * This method retrieves a child {@link Conf} of the given {@link StramElement} type with the given name. If
+     * a child {@link Conf} with the given name and {@link StramElement} type doesn't exist, then it is added.
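+     *
+     * <p>A brief sketch ({@code appConf} is an illustrative application-level {@link Conf}, and {@code OperatorConf}
+     * stands in for the concrete operator {@link Conf} class registered in {@code elementMaps}):
+     * <pre>{@code
+     * OperatorConf opConf = appConf.getOrAddChild("console", StramElement.OPERATOR, OperatorConf.class);
+     * }</pre>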
+     * @param <T> The type of the child {@link Conf}.
+     * @param id The name of the child {@link Conf}.
+     * @param childType The {@link StramElement} representing the type of the child {@link Conf}.
+     * @param clazz The {@link java.lang.Class} of the child {@link Conf} to add if a {@link Conf} of the given id
+     * and {@link StramElement} type is not present.
+     * @return A child {@link Conf} of this {@link Conf} with the given id and {@link StramElement} type.
+     */
     public <T extends Conf> T getOrAddChild(String id, StramElement childType, Class<T> clazz) {
       @SuppressWarnings("unchecked")
       Map<String, T> elChildren = (Map<String, T>)children.get(childType);
@@ -999,6 +993,15 @@ public class LogicalPlanConfiguration {
       properties.setDefaultProperties(defaults);
     }
 
+    /**
+     * This method returns a list of all the child {@link Conf}s of this {@link Conf} with the matching name
+     * and {@link StramElement} type.
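+     *
+     * <p>For example (illustrative): {@code appConf.getMatchingChildConf("console", StramElement.OPERATOR)} would
+     * return the operator-level child {@link Conf}s whose configured name (or wildcard pattern) matches
+     * {@code "console"}.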
+     * @param <T> The types of the child {@link Conf}s.
+     * @param name The name of the child {@link Conf}s to return. If the given name is null, then child
+     * configurations whose name is the {@link LogicalPlanConfiguration#WILDCARD} are matched.
+     * @param childType The {@link StramElement} corresponding to the type of a child {@link Conf}.
+     * @return The list of child {@link Conf}s with a matching name and {@link StramElement} type.
+     */
     public <T extends Conf> List<T> getMatchingChildConf(String name, StramElement childType) {
       List<T> childConfs = new ArrayList<>();
       Map<String, T> elChildren = getChildren(childType);
@@ -1038,6 +1041,17 @@ public class LogicalPlanConfiguration {
       return childConfs;
     }
 
+    /**
+     * Returns the {@link Conf} corresponding to the given id from the given map. If a {@link Conf} with the
+     * given id is not present in the given map, then a new {@link Conf} of the given class is created and added
+     * to the map.
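+     *
+     * <p>Sketch (illustrative names): {@code getOrAddConf(operatorConfs, "console", OperatorConf.class)} returns the
+     * existing entry named {@code "console"} or, if absent, reflectively instantiates a new {@code OperatorConf},
+     * registers it under that name, and returns it.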
+     * @param <T> The type of the {@link Conf}s contained in the map.
+     * @param map The map to retrieve a {@link Conf} from or add a {@link Conf} to.
+     * @param id The name of the {@link Conf} to retrieve from or add to the given map.
+     * @param clazz The {@link java.lang.Class} of the {@link Conf} to add to the given map, if a {@link Conf} with
+     * the given name is not present in the given map.
+     * @return A {@link Conf} with the given name, contained in the given map.
+     */
     protected <T extends Conf> T getOrAddConf(Map<String, T> map, String id, Class<T> clazz) {
       T conf = map.get(id);
       if (conf == null) {
@@ -1046,12 +1060,7 @@ public class LogicalPlanConfiguration {
           conf = declaredConstructor.newInstance(new Object[] {});
           conf.setId(id);
           map.put(id, conf);
-        } catch (IllegalAccessException |
-                 IllegalArgumentException |
-                 InstantiationException |
-                 NoSuchMethodException |
-                 SecurityException |
-                 InvocationTargetException e) {
+        } catch (IllegalAccessException | IllegalArgumentException | InstantiationException | NoSuchMethodException | SecurityException | InvocationTargetException e) {
           LOG.error("Error instantiating configuration", e);
         }
       }
@@ -1470,6 +1479,19 @@ public class LogicalPlanConfiguration {
     elementMaps.put(StramElement.OUTPUT_PORT, PortConf.class);
   }
 
+  /**
+   * This is a helper method which behaves as follows:<br/><br/>
+   * <ol>
+   *    <li>If the given {@link StramElement} corresponds to a {@link Conf} type which is
+   * the same as the type of the given ancestor {@link Conf}, then the given ancestor {@link Conf} is returned.</li>
+   *    <li>Otherwise the chain of wildcard child {@link Conf}s leading from the given ancestor {@link Conf} down to
+   * the given {@link StramElement} type is created as needed, and the {@link Conf} at that level is returned.</li>
+   * </ol>
+   * @param element The {@link StramElement} type of the {@link Conf} to look up; it must be the type of the given
+   * ancestor {@link Conf} or of one of its valid descendant {@link Conf} types.
+   * @param ancestorConf The {@link Conf} from which the lookup starts.
+   * @return The matching {@link Conf}, or null if a null {@link StramElement} is passed in and it does not match the ancestor.
+   */
   private static Conf getConf(StramElement element, Conf ancestorConf) {
     if (element == ancestorConf.getConfElement().getStramElement()) {
       return ancestorConf;
@@ -1481,9 +1503,23 @@ public class LogicalPlanConfiguration {
     }
     StramElement parentElement = ConfElement.getAllowedParentConf(element);
     Conf parentConf = getConf(parentElement, ancestorConf);
+
+    if (parentConf == null) {
+      throw new IllegalArgumentException("The given StramElement " + element + " neither matches the type of the " +
+                                         "given ancestorConf nor is a valid descendant type of it.");
+    }
+
     return parentConf.getOrAddChild(WILDCARD, element, elementMaps.get(element));
   }
 
+  /**
+   * This method adds a child {@link Conf} with the given {@link StramElement} type and name to the given
+   * ancestorConf.
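+   *
+   * <p>For example (names illustrative): {@code addConf(StramElement.OPERATOR, "console", appConf)} yields the
+   * operator-level {@link Conf} named {@code "console"} nested under the application-level conf {@code appConf}.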
+   * @param element The {@link StramElement} of the child {@link Conf} to add to the given ancestorConf.
+   * @param name The name of the child {@link Conf} to add to the given ancestorConf.
+   * @param ancestorConf The {@link Conf} to add a child {@link Conf} to.
+   * @return The child {@link Conf} that was added to the given ancestorConf.
+   */
   private static Conf addConf(StramElement element, String name, Conf ancestorConf) {
     StramElement parentElement = ConfElement.getAllowedParentConf(element);
     Conf conf1 = null;
@@ -1494,6 +1530,16 @@ public class LogicalPlanConfiguration {
     return conf1;
   }
 
+  /**
+   * This method returns a list of all the child {@link Conf}s of the given {@link List} of {@link Conf}s with the matching name
+   * and {@link StramElement} type.
+   * @param <T> The types of the child {@link Conf}s.
+   * @param confs The list of {@link Conf}s whose children will be searched.
+   * @param name The name of the child {@link Conf}s to return. If the given name is null, then child
+   * configurations whose name is the {@link LogicalPlanConfiguration#WILDCARD} are matched.
+   * @param childType The {@link StramElement} corresponding to the type of a child {@link Conf}.
+   * @return The list of child {@link Conf}s with a matching name and {@link StramElement} type.
+   */
   private <T extends Conf> List<T> getMatchingChildConf(List<? extends Conf> confs, String name, StramElement childType) {
     List<T> childConfs = Lists.newArrayList();
     for (Conf conf1 : confs) {
@@ -1685,7 +1731,7 @@ public class LogicalPlanConfiguration {
    * @param index The current index that the parser is on for processing the property name.
    * @param propertyName The original unsplit Apex property name.
    * @param propertyValue The value corresponding to the Apex property.
-   * @param conf
+   * @param conf The current {@link Conf} to add properties to.
    */
   private void parseStramPropertyTokens(String[] keys, int index, String propertyName, String propertyValue, Conf conf) {
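+    // Illustrative walk-through (example key only; the prefix handling happens in the caller): a property along the
+    // lines of dt.operator.console.attr.MEMORY_MB is split into dot-separated tokens, and this method walks them,
+    // dispatching each recognized StramElement token to one of the element-specific helper methods below.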
     if (index < keys.length) {
@@ -1697,104 +1743,141 @@ public class LogicalPlanConfiguration {
       if ((element == StramElement.APPLICATION) || (element == StramElement.OPERATOR) || (element == StramElement.STREAM)
               || (element == StramElement.PORT) || (element == StramElement.INPUT_PORT) || (element == StramElement.OUTPUT_PORT)
               || (element == StramElement.TEMPLATE)) {
-        if ((index + 1) < keys.length) {
-          String name = keys[index+1];
-          Conf elConf = addConf(element, name, conf);
-          if (elConf != null) {
-            parseStramPropertyTokens(keys, index + 2, propertyName, propertyValue, elConf);
-          } else {
-            LOG.error("Invalid configuration key: {}", propertyName);
-          }
-        } else {
-          LOG.warn("Invalid configuration key: {}", propertyName);
-        }
-      } else if ((element == StramElement.GATEWAY)) {
-        Conf elConf = addConf(element, null, conf);
-        if (elConf != null) {
-          parseStramPropertyTokens(keys, index+1, propertyName, propertyValue, elConf);
-        } else {
-          LOG.error("Invalid configuration key: {}", propertyName);
-        }
+        parseAppElement(index, keys, element, conf, propertyName, propertyValue);
+      } else if (element == StramElement.GATEWAY) {
+        parseGatewayElement(element, conf, keys, index, propertyName, propertyValue);
       } else if ((element == StramElement.ATTR) || ((element == null) && (conf.getDefaultChildElement() == StramElement.ATTR))) {
-        String attributeName = AttributeParseUtils.getAttributeName(element, keys, index);
-
-        if (element != StramElement.ATTR) {
-          String expName = getCompleteKey(keys, 0, index) + KEY_SEPARATOR + StramElement.ATTR.getValue() + KEY_SEPARATOR + attributeName;
-          LOG.warn("Referencing the attribute as {} instead of {} is deprecated!", getCompleteKey(keys, 0), expName);
-        }
-
-        if (conf.getConfElement().getStramElement() == null) {
-          conf = addConf(StramElement.APPLICATION, WILDCARD, conf);
-        }
-
-        if (conf != null) {
-          if (AttributeParseUtils.isSimpleAttributeName(attributeName)) {
-            //The provided attribute name was a simple name
-
-            if (!AttributeParseUtils.ALL_SIMPLE_ATTRIBUTE_NAMES.contains(attributeName)) {
-              throw new ValidationException("Invalid attribute reference: " + getCompleteKey(keys, 0));
-            }
-
-            if (!conf.getConfElement().getAllChildAttributes().contains(attributeName)) {
-              throw new ValidationException(attributeName
-                                            + " is not defined for the "
-                                            + conf.getConfElement().getStramElement()
-                                            + " or any of its child configurations.");
-            }
-
-            if (conf.getConfElement().getAmbiguousAttributes().contains(attributeName)) {
-              //If the attribute name is ambiguous at this configuration level we should tell the user.
-              LOG.warn("The attribute "
-                       + attributeName
-                       + " is ambiguous when specified on an " + conf.getConfElement().getStramElement());
-            }
-
-            if (conf.getConfElement().getContextAttributes().contains(attributeName)) {
-              @SuppressWarnings("unchecked")
-              Attribute<Object> attr = (Attribute<Object>)ContextUtils.CONTEXT_TO_ATTRIBUTE_NAME_TO_ATTRIBUTE.get(conf.getConfElement().getContextClass()).get(attributeName);
-              conf.setAttribute(attr, propertyValue);
-            } else {
-              AttributeParseUtils.processAllConfsForAttribute(conf, attributeName, propertyValue);
-            }
-          } else {
-            //This is a FQ attribute name
-            Class<? extends Context> contextClass = AttributeParseUtils.getContainingContextClass(attributeName);
-
-            //Convert to a simple name
-            attributeName = AttributeParseUtils.getSimpleAttributeName(attributeName);
-
-            if (!ContextUtils.CONTEXT_CLASS_TO_ATTRIBUTES.get(contextClass).contains(attributeName)) {
-              throw new ValidationException(attributeName + " is not a valid attribute in " + contextClass.getCanonicalName());
-            }
+        parseAttributeElement(element, keys, index, conf, propertyValue, propertyName);
+      } else if ((element == StramElement.PROP) || ((element == null) && (conf.getDefaultChildElement() == StramElement.PROP))) {
+        parsePropertyElement(element, keys, index, conf, propertyValue, propertyName);
+      } else if (element != null) {
+        conf.parseElement(element, keys, index, propertyValue);
+      }
+    }
+  }
 
-            ConfElement confWithAttr = ConfElement.CONTEXT_TO_CONF_ELEMENT.get(contextClass);
+  /**
+   * This is a helper method for {@link #parseStramPropertyTokens} which is responsible for parsing a named element
+   * such as an application, operator, stream, port, or template.
+   * @param index The current key that the parser is on.
+   * @param keys The keys that the property being parsed was split into.
+   * @param element The current {@link StramElement} of the property being parsed.
+   * @param conf1 The current {@link Conf} under which the parsed element is added.
+   * @param propertyName The complete unprocessed name of the property being parsed.
+   * @param propertyValue The value associated with the property being parsed.
+   */
+  private void parseAppElement(int index, String[] keys, StramElement element, Conf conf1, String propertyName, String propertyValue)
+  {
+    if ((index + 1) < keys.length) {
+      String name = keys[index+1];
+      Conf elConf = addConf(element, name, conf1);
+      if (elConf != null) {
+        parseStramPropertyTokens(keys, index + 2, propertyName, propertyValue, elConf);
+      } else {
+        LOG.error("Invalid configuration key: {}", propertyName);
+      }
+    } else {
+      LOG.warn("Invalid configuration key: {}", propertyName);
+    }
+  }
 
-            conf = ConfElement.addConfs(conf, confWithAttr);
+  /**
+   * This is a helper method for {@link #parseStramPropertyTokens} which is responsible for parsing a gateway element.
+   * @param element The current {@link StramElement} of the property being parsed.
+   * @param conf1 The current {@link Conf} under which the gateway configuration is added.
+   * @param keys The keys that the property being parsed was split into.
+   * @param index The current key that the parser is on.
+   * @param propertyName The complete unprocessed name of the property being parsed.
+   * @param propertyValue The value associated with the property being parsed.
+   */
+  private void parseGatewayElement(StramElement element, Conf conf1, String[] keys, int index, String propertyName, String propertyValue)
+  {
+    Conf elConf = addConf(element, null, conf1);
+    if (elConf != null) {
+      parseStramPropertyTokens(keys, index+1, propertyName, propertyValue, elConf);
+    } else {
+      LOG.error("Invalid configuration key: {}", propertyName);
+    }
+  }
 
-            @SuppressWarnings("unchecked")
-            Attribute<Object> attr = (Attribute<Object>)ContextUtils.CONTEXT_TO_ATTRIBUTE_NAME_TO_ATTRIBUTE.get(confWithAttr.getContextClass()).get(attributeName);
-            conf.setAttribute(attr, propertyValue);
-          }
-        } else {
-          LOG.error("Invalid configuration key: {}", propertyName);
+  /**
+   * This is a helper method for {@link #parseStramPropertyTokens} which is responsible for parsing an attribute.
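+   *
+   * <p>The attribute name may be a simple name (e.g. {@code MEMORY_MB}, illustrative) that is resolved against the
+   * context classes reachable from the current {@link Conf}, or a fully qualified name that also spells out the
+   * declaring {@link Context} class.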
+   * @param element The current {@link StramElement} of the property being parsed.
+   * @param keys The keys that the property being parsed was split into.
+   * @param index The current key that the parser is on.
+   * @param conf The current {@link Conf}.
+   * @param propertyValue The value associated with the property being parsed.
+   * @param propertyName The complete unprocessed name of the property being parsed.
+   */
+  private void parseAttributeElement(StramElement element, String[] keys, int index, Conf conf, String propertyValue, String propertyName)
+  {
+    String attributeName = AttributeParseUtils.getAttributeName(element, keys, index);
+    if (element != StramElement.ATTR) {
+      String expName = getCompleteKey(keys, 0, index) + KEY_SEPARATOR + StramElement.ATTR.getValue() + KEY_SEPARATOR + attributeName;
+      LOG.warn("Referencing the attribute as {} instead of {} is deprecated!", getCompleteKey(keys, 0), expName);
+    }
+    if (conf.getConfElement().getStramElement() == null) {
+      conf = addConf(StramElement.APPLICATION, WILDCARD, conf);
+    }
+    if (conf != null) {
+      if (AttributeParseUtils.isSimpleAttributeName(attributeName)) {
+        //The provided attribute name was a simple name
+        if (!AttributeParseUtils.ALL_SIMPLE_ATTRIBUTE_NAMES.contains(attributeName)) {
+          throw new ValidationException("Invalid attribute reference: " + getCompleteKey(keys, 0));
         }
-      } else if ((element == StramElement.PROP) || ((element == null) && (conf.getDefaultChildElement() == StramElement.PROP))) {
-        // Currently opProps are only supported on operators and streams
-        // Supporting current implementation where property can be directly specified under operator
-        String prop;
-        if (element == StramElement.PROP) {
-          prop = getCompleteKey(keys, index+1);
-        } else {
-          prop = getCompleteKey(keys, index);
+        if (!conf.getConfElement().getAllChildAttributes().contains(attributeName)) {
+          throw new ValidationException(attributeName + " is not defined for the " + conf.getConfElement().getStramElement() + " or any of its child configurations.");
+        }
+        if (conf.getConfElement().getAmbiguousAttributes().contains(attributeName)) {
+          //If the attribute name is ambiguous at this configuration level we should tell the user.
+          LOG.warn("The attribute " + attributeName + " is ambiguous when specified on an " + conf.getConfElement().getStramElement());
         }
-        if (prop != null) {
-          conf.setProperty(prop, propertyValue);
+        if (conf.getConfElement().getContextAttributes().contains(attributeName)) {
+          @SuppressWarnings("unchecked")
+          Attribute<Object> attr = (Attribute<Object>)ContextUtils.CONTEXT_TO_ATTRIBUTE_NAME_TO_ATTRIBUTE.get(conf.getConfElement().getContextClass()).get(attributeName);
+          conf.setAttribute(attr, propertyValue);
         } else {
-          LOG.warn("Invalid property specification, no property name specified for {}", propertyName);
+          AttributeParseUtils.processAllConfsForAttribute(conf, attributeName, propertyValue);
         }
-      } else if (element != null) {
-        conf.parseElement(element, keys, index, propertyValue);
+      } else {
+        //This is a FQ attribute name
+        Class<? extends Context> contextClass = AttributeParseUtils.getContainingContextClass(attributeName);
+        //Convert to a simple name
+        attributeName = AttributeParseUtils.getSimpleAttributeName(attributeName);
+        if (!ContextUtils.CONTEXT_CLASS_TO_ATTRIBUTES.get(contextClass).contains(attributeName)) {
+          throw new ValidationException(attributeName + " is not a valid attribute in " + contextClass.getCanonicalName());
+        }
+        ConfElement confWithAttr = ConfElement.CONTEXT_TO_CONF_ELEMENT.get(contextClass);
+        conf = ConfElement.addConfs(conf, confWithAttr);
+        @SuppressWarnings("unchecked")
+        Attribute<Object> attr = (Attribute<Object>)ContextUtils.CONTEXT_TO_ATTRIBUTE_NAME_TO_ATTRIBUTE.get(confWithAttr.getContextClass()).get(attributeName);
+        conf.setAttribute(attr, propertyValue);
       }
+    } else {
+      LOG.error("Invalid configuration key: {}", propertyName);
+    }
+  }
+
+  /**
+   * This is a helper method for {@link #parseStramPropertyTokens} which is responsible for parsing a prop.
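+   *
+   * <p>For example (illustrative key): a property such as {@code dt.operator.console.prop.hostname} ends up here,
+   * and everything after the {@code prop} token is treated as the operator property name.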
+   * @param element The current {@link StramElement} of the property being parsed.
+   * @param keys The keys that the property being parsed was split into.
+   * @param index The current key that the parser is on.
+   * @param conf The current {@link Conf}.
+   * @param propertyValue The value associated with the property being parsed.
+   * @param propertyName The complete unprocessed name of the property being parsed.
+   */
+  private void parsePropertyElement(StramElement element, String[] keys, int index, Conf conf, String propertyValue, String propertyName)
+  {
+    // Currently opProps are only supported on operators and streams
+    // Supporting current implementation where property can be directly specified under operator
+    String prop;
+    if (element == StramElement.PROP) {
+      prop = getCompleteKey(keys, index+1);
+    } else {
+      prop = getCompleteKey(keys, index);
+    }
+    if (prop != null) {
+      conf.setProperty(prop, propertyValue);
+    } else {
+      LOG.warn("Invalid property specification, no property name specified for {}", propertyName);
     }
   }
 
@@ -1831,7 +1914,12 @@ public class LogicalPlanConfiguration {
    * @return The completed key.
    */
   private static String getCompleteKey(String[] keys, int start, int end) {
-    StringBuilder sb = new StringBuilder(1024);
+    int length = 0;
+    for (String key : keys) {
+      length += key.length();
+    }
+
+    StringBuilder sb = new StringBuilder(length);
     for (int i = start; i < end; ++i) {
       if (i > start) {
         sb.append(KEY_SEPARATOR);