Posted to commits@apex.apache.org by ch...@apache.org on 2015/09/10 00:12:32 UTC

[01/50] incubator-apex-core git commit: get ready for next release

Repository: incubator-apex-core
Updated Branches:
  refs/heads/master b7475a04e -> d7c8964b7


get ready for next release


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/66a75e01
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/66a75e01
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/66a75e01

Branch: refs/heads/master
Commit: 66a75e011681fbf4b89a024ecc0fb1fe321fde52
Parents: 9d03e25
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Mon Aug 3 11:25:44 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Mon Aug 3 11:25:44 2015 -0700

----------------------------------------------------------------------
 apex-app-archetype/pom.xml  | 2 +-
 apex-conf-archetype/pom.xml | 2 +-
 api/pom.xml                 | 2 +-
 bufferserver/pom.xml        | 4 ++--
 common/pom.xml              | 2 +-
 engine/pom.xml              | 2 +-
 pom.xml                     | 2 +-
 7 files changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/66a75e01/apex-app-archetype/pom.xml
----------------------------------------------------------------------
diff --git a/apex-app-archetype/pom.xml b/apex-app-archetype/pom.xml
index 9f97937..7d931e3 100644
--- a/apex-app-archetype/pom.xml
+++ b/apex-app-archetype/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.0.0</version>
+    <version>3.1.0-SNAPSHOT</version>
   </parent>
 
   <artifactId>apex-app-archetype</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/66a75e01/apex-conf-archetype/pom.xml
----------------------------------------------------------------------
diff --git a/apex-conf-archetype/pom.xml b/apex-conf-archetype/pom.xml
index fff78a9..1f510a2 100644
--- a/apex-conf-archetype/pom.xml
+++ b/apex-conf-archetype/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.0.0</version>
+    <version>3.1.0-SNAPSHOT</version>
   </parent>
 
   <artifactId>apex-conf-archetype</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/66a75e01/api/pom.xml
----------------------------------------------------------------------
diff --git a/api/pom.xml b/api/pom.xml
index 23d1e08..ff3f441 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -6,7 +6,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.0.0</version>
+    <version>3.1.0-SNAPSHOT</version>
   </parent>
 
   <artifactId>dt-api</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/66a75e01/bufferserver/pom.xml
----------------------------------------------------------------------
diff --git a/bufferserver/pom.xml b/bufferserver/pom.xml
index 78444ff..02ea4a4 100644
--- a/bufferserver/pom.xml
+++ b/bufferserver/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.0.0</version>
+    <version>3.1.0-SNAPSHOT</version>
   </parent>
 
   <artifactId>dt-bufferserver</artifactId>
@@ -28,7 +28,7 @@
     <dependency>
       <groupId>com.datatorrent</groupId>
       <artifactId>dt-common</artifactId>
-      <version>3.0.0</version>
+      <version>3.1.0-SNAPSHOT</version>
       <type>jar</type>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/66a75e01/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index ba5009b..7079353 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.0.0</version>
+    <version>3.1.0-SNAPSHOT</version>
   </parent>
 
   <artifactId>dt-common</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/66a75e01/engine/pom.xml
----------------------------------------------------------------------
diff --git a/engine/pom.xml b/engine/pom.xml
index da43657..1e6a7ed 100644
--- a/engine/pom.xml
+++ b/engine/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.0.0</version>
+    <version>3.1.0-SNAPSHOT</version>
   </parent>
 
   <artifactId>dt-engine</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/66a75e01/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 82971d2..852cc3e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
 
   <groupId>com.datatorrent</groupId>
   <artifactId>dt-framework</artifactId>
-  <version>3.0.0</version>
+  <version>3.1.0-SNAPSHOT</version>
   <packaging>pom</packaging>
 
   <name>Realtime Stream Processing Framework</name>
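
The commit above bumps the <version> element by hand in all seven module POMs. As an aside (not part of the commit itself), a coordinated bump like this is also what the versions-maven-plugin automates:

    mvn versions:set -DnewVersion=3.1.0-SNAPSHOT
    mvn versions:commit

versions:set rewrites the version in the parent POM and propagates it to the child modules in the reactor, and versions:commit removes the pom.xml.versionsBackup files it leaves behind.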


[21/50] incubator-apex-core git commit: Merge pull request #116 from 243826/semantic-versioning

Posted by ch...@apache.org.
Merge pull request #116 from 243826/semantic-versioning

enable semantic versioning

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/3f8f97ed
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/3f8f97ed
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/3f8f97ed

Branch: refs/heads/master
Commit: 3f8f97ed7cddddfd04d28a34141ab1a0531fb4be
Parents: 4e49219 352cc61
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Fri Aug 7 14:38:39 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Fri Aug 7 14:38:39 2015 -0700

----------------------------------------------------------------------
 api/pom.xml    | 40 ++++++++++++++++++++++++++++++++++++++++
 common/pom.xml | 46 ++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 86 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3f8f97ed/api/pom.xml
----------------------------------------------------------------------


[07/50] incubator-apex-core git commit: Merge pull request #108 from gauravgopi123/APEX-13-PR1

Posted by ch...@apache.org.
Merge pull request #108 from gauravgopi123/APEX-13-PR1

APEX-19 APEX-13 #resolve made checkpoint writes asynchronous

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/3c0076a5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/3c0076a5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/3c0076a5

Branch: refs/heads/master
Commit: 3c0076a51be71be4ac0e74e6f5b0d3763a148b3e
Parents: a3e9dfa 29eb6c3
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Tue Aug 4 17:10:35 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Tue Aug 4 17:10:35 2015 -0700

----------------------------------------------------------------------
 .../common/util/AsyncFSStorageAgent.java        | 111 ++++++++++++++++
 .../datatorrent/common/util/FSStorageAgent.java |   3 +-
 .../common/codec/JsonStreamCodecTest.java       |  15 ++-
 .../common/util/AsyncFSStorageAgentTest.java    | 133 +++++++++++++++++++
 .../java/com/datatorrent/stram/StramClient.java |   5 +-
 .../datatorrent/stram/StramLocalCluster.java    |   4 +-
 .../stram/StreamingAppMasterService.java        |   2 +-
 .../stram/StreamingContainerManager.java        |  10 +-
 .../java/com/datatorrent/stram/engine/Node.java |  64 ++++++++-
 .../stram/plan/physical/PhysicalPlan.java       |   8 +-
 .../com/datatorrent/stram/CheckpointTest.java   |  11 +-
 .../stram/LogicalPlanModificationTest.java      |  22 ++-
 .../com/datatorrent/stram/PartitioningTest.java |  26 +++-
 .../stram/StramLocalClusterTest.java            |  22 ++-
 .../datatorrent/stram/StramMiniClusterTest.java |   9 +-
 .../datatorrent/stram/StramRecoveryTest.java    |  56 ++++++--
 .../stram/StreamingContainerManagerTest.java    |  45 ++++++-
 .../stram/debug/TupleRecorderTest.java          |   3 +
 .../stram/engine/AutoMetricTest.java            |   2 +
 .../stram/engine/InputOperatorTest.java         |   5 +-
 .../stram/engine/ProcessingModeTests.java       |   9 ++
 .../datatorrent/stram/engine/SliderTest.java    |   5 +
 .../com/datatorrent/stram/engine/StatsTest.java |  10 +-
 .../stram/engine/WindowGeneratorTest.java       |  11 +-
 .../stram/webapp/StramWebServicesTest.java      |   6 +-
 25 files changed, 527 insertions(+), 70 deletions(-)
----------------------------------------------------------------------



[10/50] incubator-apex-core git commit: Merge branch 'SchemaSupport_3.1.0' of https://github.com/chandnisingh/Apex into chandnisingh-SchemaSupport_3.1.0

Posted by ch...@apache.org.
Merge branch 'SchemaSupport_3.1.0' of https://github.com/chandnisingh/Apex into chandnisingh-SchemaSupport_3.1.0


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/19d66582
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/19d66582
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/19d66582

Branch: refs/heads/master
Commit: 19d665824974c5406b642d0c1aba83c699499b71
Parents: 528423a 61929b5
Author: thomas <th...@datatorrent.com>
Authored: Wed Aug 5 20:46:42 2015 -0700
Committer: thomas <th...@datatorrent.com>
Committed: Wed Aug 5 20:46:42 2015 -0700

----------------------------------------------------------------------
 .../main/java/com/datatorrent/api/Context.java  |   7 +
 .../annotation/InputPortFieldAnnotation.java    |  10 +-
 .../annotation/OutputPortFieldAnnotation.java   |  10 +
 .../java/com/datatorrent/stram/cli/DTCli.java   |  15 +-
 .../stram/plan/logical/LogicalPlan.java         |  15 ++
 .../plan/logical/LogicalPlanConfiguration.java  |  26 +-
 .../stram/webapp/OperatorDiscoverer.java        | 235 +++++++++++++------
 .../com/datatorrent/stram/webapp/TypeGraph.java |  28 +++
 .../plan/LogicalPlanConfigurationTest.java      |  65 ++++-
 .../stram/plan/SchemaTestOperator.java          |  33 +++
 .../stram/webapp/OperatorDiscoveryTest.java     |  64 ++++-
 .../src/test/resources/schemaTestTopology.json  |  43 ++++
 12 files changed, 473 insertions(+), 78 deletions(-)
----------------------------------------------------------------------



[48/50] incubator-apex-core git commit: Added @Since, Preparing for Release 3.1.0

Posted by ch...@apache.org.
Added @Since, Preparing for Release 3.1.0


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/ebc83f8c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/ebc83f8c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/ebc83f8c

Branch: refs/heads/master
Commit: ebc83f8cba7169be57c685c408a9c5f18071ac2c
Parents: b37262d
Author: DataTorrent CI <je...@datatorrent.com>
Authored: Tue Sep 1 03:58:50 2015 -0700
Committer: DataTorrent CI <je...@datatorrent.com>
Committed: Tue Sep 1 03:58:50 2015 -0700

----------------------------------------------------------------------
 apex-app-archetype/README.md                                     | 2 +-
 apex-app-archetype/pom.xml                                       | 2 +-
 .../src/test/resources/projects/basic/archetype.properties       | 2 +-
 apex-conf-archetype/README.md                                    | 2 +-
 apex-conf-archetype/pom.xml                                      | 2 +-
 .../src/test/resources/projects/basic/archetype.properties       | 2 +-
 api/pom.xml                                                      | 4 ++--
 bufferserver/pom.xml                                             | 4 ++--
 common/pom.xml                                                   | 4 ++--
 .../java/com/datatorrent/common/util/AsyncFSStorageAgent.java    | 3 +++
 engine/pom.xml                                                   | 2 +-
 .../test/java/com/datatorrent/stram/client/AppPackageTest.java   | 2 +-
 engine/src/test/resources/testAppPackage/mydtapp/pom.xml         | 2 +-
 pom.xml                                                          | 2 +-
 14 files changed, 19 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/apex-app-archetype/README.md
----------------------------------------------------------------------
diff --git a/apex-app-archetype/README.md b/apex-app-archetype/README.md
index 7d79ddb..a3b7d08 100644
--- a/apex-app-archetype/README.md
+++ b/apex-app-archetype/README.md
@@ -6,7 +6,7 @@ How to Generate an Apex Application Project Template
 
 Run the following command
 
-    mvn archetype:generate -DarchetypeGroupId=com.datatorrent -DarchetypeArtifactId=apex-app-archetype -DarchetypeVersion=3.1.0-RC2 -DgroupId=com.example -Dpackage=com.example.myapexapp -DartifactId=myapexapp -Dversion=1.0-SNAPSHOT
+    mvn archetype:generate -DarchetypeGroupId=com.datatorrent -DarchetypeArtifactId=apex-app-archetype -DarchetypeVersion=3.1.0 -DgroupId=com.example -Dpackage=com.example.myapexapp -DartifactId=myapexapp -Dversion=1.0-SNAPSHOT
 
 Using your favorite IDE, open the project that has just been created by the above command.
 Write your application code and optionally operator code 
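
As a usage note (assuming the generated project keeps the archetype's default build setup), the project created by the archetype:generate command above is built into a deployable Apex application package with:

    cd myapexapp
    mvn clean package -DskipTests

which produces the .apa file under target/.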

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/apex-app-archetype/pom.xml
----------------------------------------------------------------------
diff --git a/apex-app-archetype/pom.xml b/apex-app-archetype/pom.xml
index c1f4c6c..a1361b7 100644
--- a/apex-app-archetype/pom.xml
+++ b/apex-app-archetype/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-RC2</version>
+    <version>3.1.0</version>
   </parent>
 
   <artifactId>apex-app-archetype</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/apex-app-archetype/src/test/resources/projects/basic/archetype.properties
----------------------------------------------------------------------
diff --git a/apex-app-archetype/src/test/resources/projects/basic/archetype.properties b/apex-app-archetype/src/test/resources/projects/basic/archetype.properties
index 839c22c..1c56692 100644
--- a/apex-app-archetype/src/test/resources/projects/basic/archetype.properties
+++ b/apex-app-archetype/src/test/resources/projects/basic/archetype.properties
@@ -3,4 +3,4 @@ package=it.pkg
 version=0.1-SNAPSHOT
 groupId=archetype.it
 artifactId=basic
-archetypeVersion=3.1.0-RC2
+archetypeVersion=3.1.0

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/apex-conf-archetype/README.md
----------------------------------------------------------------------
diff --git a/apex-conf-archetype/README.md b/apex-conf-archetype/README.md
index 8bd6b0d..71edaba 100644
--- a/apex-conf-archetype/README.md
+++ b/apex-conf-archetype/README.md
@@ -6,7 +6,7 @@ How to Generate a Apex App Configuration Project Template
 
 Run the following command
 
-    mvn archetype:generate -DarchetypeGroupId=com.datatorrent -DarchetypeArtifactId=apex-conf-archetype -DarchetypeVersion=3.1.0-RC2 -DgroupId=com.example -Dpackage=com.example.myapexapp -DartifactId=myapexconf -Dversion=1.0-SNAPSHOT
+    mvn archetype:generate -DarchetypeGroupId=com.datatorrent -DarchetypeArtifactId=apex-conf-archetype -DarchetypeVersion=3.1.0 -DgroupId=com.example -Dpackage=com.example.myapexapp -DartifactId=myapexconf -Dversion=1.0-SNAPSHOT
 
 Using your favorite IDE, open the project that has just been created by the above command.
 Write your application code and optionally operator code 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/apex-conf-archetype/pom.xml
----------------------------------------------------------------------
diff --git a/apex-conf-archetype/pom.xml b/apex-conf-archetype/pom.xml
index faa70cd..3e63a1a 100644
--- a/apex-conf-archetype/pom.xml
+++ b/apex-conf-archetype/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-RC2</version>
+    <version>3.1.0</version>
   </parent>
 
   <artifactId>apex-conf-archetype</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties
----------------------------------------------------------------------
diff --git a/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties b/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties
index 839c22c..1c56692 100644
--- a/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties
+++ b/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties
@@ -3,4 +3,4 @@ package=it.pkg
 version=0.1-SNAPSHOT
 groupId=archetype.it
 artifactId=basic
-archetypeVersion=3.1.0-RC2
+archetypeVersion=3.1.0

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/api/pom.xml
----------------------------------------------------------------------
diff --git a/api/pom.xml b/api/pom.xml
index d0cdc1f..cf94b03 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -6,7 +6,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-RC2</version>
+    <version>3.1.0</version>
   </parent>
 
   <artifactId>dt-api</artifactId>
@@ -38,7 +38,7 @@
             <dependency>
               <groupId>com.datatorrent</groupId>
               <artifactId>dt-api</artifactId>
-              <version>3.1.0-RC2</version>
+              <version>3.1.0</version>
             </dependency>
           </oldVersion>
           <newVersion>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/bufferserver/pom.xml
----------------------------------------------------------------------
diff --git a/bufferserver/pom.xml b/bufferserver/pom.xml
index e612c0e..4ddd5d0 100644
--- a/bufferserver/pom.xml
+++ b/bufferserver/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-RC2</version>
+    <version>3.1.0</version>
   </parent>
 
   <artifactId>dt-bufferserver</artifactId>
@@ -28,7 +28,7 @@
     <dependency>
       <groupId>com.datatorrent</groupId>
       <artifactId>dt-common</artifactId>
-      <version>3.1.0-RC2</version>
+      <version>3.1.0</version>
       <type>jar</type>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index 7209d1d..d17c7c9 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-RC2</version>
+    <version>3.1.0</version>
   </parent>
 
   <artifactId>dt-common</artifactId>
@@ -23,7 +23,7 @@
             <dependency>
               <groupId>com.datatorrent</groupId>
               <artifactId>dt-common</artifactId>
-              <version>3.1.0-RC2</version>
+              <version>3.1.0</version>
             </dependency>
           </oldVersion>
           <newVersion>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java b/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
index b565447..f98d775 100644
--- a/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
+++ b/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
@@ -1,3 +1,4 @@
+
 /**
  * Copyright (C) 2015 DataTorrent, Inc.
  *
@@ -12,6 +13,8 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
+ *
+ * @since 3.1.0
  */
 package com.datatorrent.common.util;
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/engine/pom.xml
----------------------------------------------------------------------
diff --git a/engine/pom.xml b/engine/pom.xml
index 47cee1a..e63898b 100644
--- a/engine/pom.xml
+++ b/engine/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-RC2</version>
+    <version>3.1.0</version>
   </parent>
 
   <artifactId>dt-engine</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java b/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
index 36b7e20..2138ad4 100644
--- a/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
@@ -72,7 +72,7 @@ public class AppPackageTest
   {
     Assert.assertEquals("mydtapp", json.getString("appPackageName"));
     Assert.assertEquals("1.0-SNAPSHOT", json.getString("appPackageVersion"));
-    Assert.assertEquals("3.1.0-RC2", json.getString("dtEngineVersion"));
+    Assert.assertEquals("3.1.0", json.getString("dtEngineVersion"));
     Assert.assertEquals("lib/*.jar", json.getJSONArray("classPath").getString(0));
 
     JSONObject application = json.getJSONArray("applications").getJSONObject(0);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/engine/src/test/resources/testAppPackage/mydtapp/pom.xml
----------------------------------------------------------------------
diff --git a/engine/src/test/resources/testAppPackage/mydtapp/pom.xml b/engine/src/test/resources/testAppPackage/mydtapp/pom.xml
index c88ed44..9c84eeb 100644
--- a/engine/src/test/resources/testAppPackage/mydtapp/pom.xml
+++ b/engine/src/test/resources/testAppPackage/mydtapp/pom.xml
@@ -13,7 +13,7 @@
 
   <properties>
     <!-- change this if you desire to use a different version of DataTorrent -->
-    <datatorrent.version>3.1.0-RC2</datatorrent.version>
+    <datatorrent.version>3.1.0</datatorrent.version>
     <datatorrent.apppackage.classpath>lib/*.jar</datatorrent.apppackage.classpath>
   </properties>
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ebc83f8c/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 3cdd106..c5b19bb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
 
   <groupId>com.datatorrent</groupId>
   <artifactId>dt-framework</artifactId>
-  <version>3.1.0-RC2</version>
+  <version>3.1.0</version>
   <packaging>pom</packaging>
 
   <name>Realtime Stream Processing Framework</name>


[20/50] incubator-apex-core git commit: enable semantic versioning

Posted by ch...@apache.org.
enable semantic versioning


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/352cc61d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/352cc61d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/352cc61d

Branch: refs/heads/master
Commit: 352cc61da198533aea3e89223310093a29b63e05
Parents: 66a75e0
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Fri Aug 7 12:50:59 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Fri Aug 7 14:35:22 2015 -0700

----------------------------------------------------------------------
 api/pom.xml    | 40 ++++++++++++++++++++++++++++++++++++++++
 common/pom.xml | 46 ++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 86 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/352cc61d/api/pom.xml
----------------------------------------------------------------------
diff --git a/api/pom.xml b/api/pom.xml
index ff3f441..ebb2f8d 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -27,6 +27,46 @@
     </repository>
   </repositories>
 
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>com.github.siom79.japicmp</groupId>
+        <artifactId>japicmp-maven-plugin</artifactId>
+        <version>0.5.1</version>
+        <configuration>
+          <oldVersion>
+            <dependency>
+              <groupId>com.datatorrent</groupId>
+              <artifactId>dt-api</artifactId>
+              <version>3.0.0</version>
+            </dependency>
+          </oldVersion>
+          <newVersion>
+            <file>
+              <path>${project.build.directory}/${project.artifactId}-${project.version}.jar</path>
+            </file>
+          </newVersion>
+          <parameter>
+            <onlyModified>true</onlyModified>
+            <accessModifier>protected</accessModifier>
+            <breakBuildOnModifications>false</breakBuildOnModifications>
+            <breakBuildOnBinaryIncompatibleModifications>true</breakBuildOnBinaryIncompatibleModifications>
+            <onlyBinaryIncompatible>false</onlyBinaryIncompatible>
+            <includeSynthetic>false</includeSynthetic>
+            <ignoreMissingClasses>false</ignoreMissingClasses>
+          </parameter>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>verify</phase>
+            <goals>
+              <goal>cmp</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
   <dependencies>
     <dependency>
       <groupId>org.apache.hadoop</groupId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/352cc61d/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index 7079353..c3166bc 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -12,6 +12,47 @@
   <name>Base Library</name>
   <packaging>jar</packaging>
 
+  <build>
+  <plugins>
+      <plugin>
+        <groupId>com.github.siom79.japicmp</groupId>
+        <artifactId>japicmp-maven-plugin</artifactId>
+        <version>0.5.1</version>
+        <configuration>
+          <oldVersion>
+            <dependency>
+              <groupId>com.datatorrent</groupId>
+              <artifactId>dt-common</artifactId>
+              <version>3.0.0</version>
+            </dependency>
+          </oldVersion>
+          <newVersion>
+            <file>
+              <path>${project.build.directory}/${project.artifactId}-${project.version}.jar</path>
+            </file>
+          </newVersion>
+          <parameter>
+            <onlyModified>true</onlyModified>
+            <accessModifier>protected</accessModifier>
+            <breakBuildOnModifications>false</breakBuildOnModifications>
+            <breakBuildOnBinaryIncompatibleModifications>true</breakBuildOnBinaryIncompatibleModifications>
+            <onlyBinaryIncompatible>false</onlyBinaryIncompatible>
+            <includeSynthetic>false</includeSynthetic>
+            <ignoreMissingClasses>false</ignoreMissingClasses>
+          </parameter>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>verify</phase>
+            <goals>
+              <goal>cmp</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+  </plugins>
+  </build>
+
   <dependencies>
     <dependency>
       <groupId>${project.groupId}</groupId>
@@ -47,5 +88,10 @@
       <artifactId>validation-api</artifactId>
       <version>1.1.0.Final</version>
     </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-core</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
   </dependencies>
 </project>
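
The japicmp-maven-plugin configuration added above binds its cmp goal to the verify phase, so a regular build now compares the freshly built dt-api and dt-common jars against the released 3.0.0 artifacts and, with breakBuildOnBinaryIncompatibleModifications set to true, fails only on binary-incompatible API changes while merely reporting other modifications. A minimal way to run just this check (a hedged example, assuming the module directories api and common as in the diffs above):

    mvn clean verify -pl api,common -am

-am also builds any upstream modules that api and common depend on within the reactor.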


[16/50] incubator-apex-core git commit: Fixed the schema class missing error

Posted by ch...@apache.org.
Fixed the schema class missing error


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/430aec91
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/430aec91
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/430aec91

Branch: refs/heads/master
Commit: 430aec9166c4ef02a3163b0d39d9e6aa16bae934
Parents: 19d6658
Author: Chandni Singh <ch...@datatorrent.com>
Authored: Thu Aug 6 19:32:43 2015 -0700
Committer: Chandni Singh <ch...@datatorrent.com>
Committed: Thu Aug 6 21:08:45 2015 -0700

----------------------------------------------------------------------
 .../stram/plan/logical/LogicalPlanConfiguration.java           | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/430aec91/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
index d838a2d..46291a8 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
@@ -1135,11 +1135,7 @@ public class LogicalPlanConfiguration {
       String schemaClassName = streamConf.properties.getProperty(STREAM_SCHEMA);
       Class<?> schemaClass = null;
       if (schemaClassName != null) {
-        try {
-          schemaClass = Class.forName(schemaClassName);
-        } catch (ClassNotFoundException e) {
-          throw new ValidationException("schema class not found: " + schemaClassName);
-        }
+        schemaClass = StramUtils.classForName(schemaClassName, Object.class);
       }
 
       if (streamConf.sourceNode != null) {


[40/50] incubator-apex-core git commit: Fix test failures due to reuse of previous checkpoints.

Posted by ch...@apache.org.
Fix test failures due to reuse of previous checkpoints.


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/d19fa66e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/d19fa66e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/d19fa66e

Branch: refs/heads/master
Commit: d19fa66edd31e8b8cb481415fcda86bb2c32f6fc
Parents: 76faf86
Author: thomas <th...@datatorrent.com>
Authored: Thu Aug 20 18:59:01 2015 -0700
Committer: thomas <th...@datatorrent.com>
Committed: Thu Aug 20 18:59:01 2015 -0700

----------------------------------------------------------------------
 .../common/util/AsyncFSStorageAgent.java            |  1 +
 .../java/com/datatorrent/stram/StramClient.java     |  1 -
 .../stram/StreamingContainerManagerTest.java        |  2 ++
 .../datatorrent/stram/engine/AtMostOnceTest.java    |  2 +-
 .../stram/engine/ProcessingModeTests.java           | 16 +++++++---------
 .../stram/engine/RecoverableInputOperator.java      | 10 +++++-----
 6 files changed, 16 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/d19fa66e/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java b/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
index d5de61c..2ab6771 100644
--- a/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
+++ b/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
@@ -36,6 +36,7 @@ public class AsyncFSStorageAgent extends FSStorageAgent
 
   private boolean syncCheckpoint = false;
 
+  @SuppressWarnings("unused")
   private AsyncFSStorageAgent()
   {
     super();

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/d19fa66e/engine/src/main/java/com/datatorrent/stram/StramClient.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StramClient.java b/engine/src/main/java/com/datatorrent/stram/StramClient.java
index 8a8baf3..db36ef6 100644
--- a/engine/src/main/java/com/datatorrent/stram/StramClient.java
+++ b/engine/src/main/java/com/datatorrent/stram/StramClient.java
@@ -54,7 +54,6 @@ import org.apache.log4j.DTLoggerFactory;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.BasicContainerOptConfigurator;
-import com.datatorrent.common.util.FSStorageAgent;
 import com.datatorrent.stram.client.StramClientUtils;
 import com.datatorrent.stram.client.StramClientUtils.ClientRMHelper;
 import com.datatorrent.stram.engine.StreamingContainer;

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/d19fa66e/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java b/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
index 89f2878..bd9699c 100644
--- a/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
@@ -727,6 +727,8 @@ public class StreamingContainerManagerTest
     // deploy all containers
     for (Map.Entry<PTContainer, MockContainer> ce : mockContainers.entrySet()) {
       ce.getValue().deploy();
+    }
+    for (Map.Entry<PTContainer, MockContainer> ce : mockContainers.entrySet()) {
       // skip buffer server purge in monitorHeartbeat
       ce.getKey().bufferServerAddress = null;
     }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/d19fa66e/engine/src/test/java/com/datatorrent/stram/engine/AtMostOnceTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/engine/AtMostOnceTest.java b/engine/src/test/java/com/datatorrent/stram/engine/AtMostOnceTest.java
index 41e0bd9..1205f30 100644
--- a/engine/src/test/java/com/datatorrent/stram/engine/AtMostOnceTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/engine/AtMostOnceTest.java
@@ -51,7 +51,7 @@ public class AtMostOnceTest extends ProcessingModeTests
     Assert.assertTrue("No Duplicates", CollectorOperator.duplicates.isEmpty());
   }
 
-  //@Test
+  @Test
   @Override
   public void testLinearOperatorRecovery() throws Exception
   {

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/d19fa66e/engine/src/test/java/com/datatorrent/stram/engine/ProcessingModeTests.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/engine/ProcessingModeTests.java b/engine/src/test/java/com/datatorrent/stram/engine/ProcessingModeTests.java
index 0393394..92c057d 100644
--- a/engine/src/test/java/com/datatorrent/stram/engine/ProcessingModeTests.java
+++ b/engine/src/test/java/com/datatorrent/stram/engine/ProcessingModeTests.java
@@ -18,16 +18,17 @@ package com.datatorrent.stram.engine;
 import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.BaseOperator;
 
-import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.concurrent.atomic.AtomicBoolean;
+
 import static java.lang.Thread.sleep;
 
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.Rule;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -35,11 +36,11 @@ import com.datatorrent.api.*;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DAG.Locality;
 import com.datatorrent.api.Operator.ProcessingMode;
-
 import com.datatorrent.bufferserver.packet.MessageType;
 import com.datatorrent.bufferserver.util.Codec;
 import com.datatorrent.stram.StramLocalCluster;
 import com.datatorrent.stram.plan.logical.LogicalPlan;
+import com.datatorrent.stram.support.StramTestSupport.TestMeta;
 import com.datatorrent.stram.tuple.EndWindowTuple;
 import com.datatorrent.stram.tuple.Tuple;
 
@@ -48,6 +49,7 @@ import com.datatorrent.stram.tuple.Tuple;
  */
 public class ProcessingModeTests
 {
+  @Rule public TestMeta testMeta = new TestMeta();
   ProcessingMode processingMode;
   int maxTuples = 30;
 
@@ -78,8 +80,7 @@ public class ProcessingModeTests
     dag.getAttributes().put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 2);
     dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);
     dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 1);
-    String workingDir = new File("target/testLinearInputOperatorRecovery").getAbsolutePath();
-    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null));
     RecoverableInputOperator rip = dag.addOperator("LongGenerator", RecoverableInputOperator.class);
     rip.setMaximumTuples(maxTuples);
     rip.setSimulateFailure(true);
@@ -102,8 +103,7 @@ public class ProcessingModeTests
     CollectorOperator.duplicates.clear();
 
     LogicalPlan dag = new LogicalPlan();
-    String workingDir = new File("target/testLinearOperatorRecovery").getAbsolutePath();
-    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null));
     dag.getAttributes().put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 2);
     dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);
     dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 1);
@@ -128,8 +128,7 @@ public class ProcessingModeTests
     CollectorOperator.duplicates.clear();
 
     LogicalPlan dag = new LogicalPlan();
-    String workingDir = new File("target/testLinearInlineOperatorsRecovery").getAbsolutePath();
-    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null));
     dag.getAttributes().put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 2);
     dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);
     dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 1);
@@ -203,7 +202,6 @@ public class ProcessingModeTests
       }
     }
 
-    private static final long serialVersionUID = 201404161447L;
   }
 
   public static class MultiInputOperator implements Operator

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/d19fa66e/engine/src/test/java/com/datatorrent/stram/engine/RecoverableInputOperator.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/engine/RecoverableInputOperator.java b/engine/src/test/java/com/datatorrent/stram/engine/RecoverableInputOperator.java
index 510fbd5..4cf8274 100644
--- a/engine/src/test/java/com/datatorrent/stram/engine/RecoverableInputOperator.java
+++ b/engine/src/test/java/com/datatorrent/stram/engine/RecoverableInputOperator.java
@@ -27,7 +27,6 @@ import org.slf4j.LoggerFactory;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.InputOperator;
-import com.datatorrent.api.Operator;
 
 import com.datatorrent.bufferserver.util.Codec;
 
@@ -37,7 +36,7 @@ import com.datatorrent.bufferserver.util.Codec;
 public class RecoverableInputOperator implements InputOperator, com.datatorrent.api.Operator.CheckpointListener
 {
   public final transient DefaultOutputPort<Long> output = new DefaultOutputPort<Long>();
-  long checkpointedWindowId;
+  private long checkpointedWindowId;
   boolean firstRun = true;
   transient boolean first;
   transient long windowId;
@@ -92,7 +91,8 @@ public class RecoverableInputOperator implements InputOperator, com.datatorrent.
   @Override
   public void setup(OperatorContext context)
   {
-    firstRun &= checkpointedWindowId == 0;
+    firstRun = (checkpointedWindowId == 0);
+    logger.debug("firstRun={} checkpointedWindowId={}", firstRun, Codec.getStringWindowId(checkpointedWindowId));
   }
 
   @Override
@@ -105,6 +105,7 @@ public class RecoverableInputOperator implements InputOperator, com.datatorrent.
   {
     if (checkpointedWindowId == 0) {
       checkpointedWindowId = windowId;
+      logger.debug("firstRun={} checkpointedWindowId={}", firstRun, Codec.getStringWindowId(checkpointedWindowId));
     }
 
     logger.debug("{} checkpointed at {}", this, Codec.getStringWindowId(windowId));
@@ -113,8 +114,7 @@ public class RecoverableInputOperator implements InputOperator, com.datatorrent.
   @Override
   public void committed(long windowId)
   {
-    logger.debug("{} committed at {}", this, Codec.getStringWindowId(windowId));
-
+    logger.debug("{} committed at {} firstRun {}, checkpointedWindowId {}", this, Codec.getStringWindowId(windowId), firstRun, Codec.getStringWindowId(checkpointedWindowId));
     if (simulateFailure && firstRun && checkpointedWindowId > 0 && windowId > checkpointedWindowId) {
       throw new RuntimeException("Failure Simulation from " + this);
     }


[43/50] incubator-apex-core git commit: APEX-54 #resolve Added code to copy from local file to HDFS with overwrite option for AsyncFSStorageAgent

Posted by ch...@apache.org.
APEX-54 #resolve Added code to copy from local file to HDFS with overwrite option for AsyncFSStorageAgent


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/be4af0af
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/be4af0af
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/be4af0af

Branch: refs/heads/master
Commit: be4af0af1fe9800c690e98f770c1059ac2168143
Parents: 9d08532
Author: ishark <is...@datatorrent.com>
Authored: Mon Aug 17 18:28:47 2015 -0700
Committer: ishark <is...@datatorrent.com>
Committed: Fri Aug 21 14:28:40 2015 -0700

----------------------------------------------------------------------
 .../common/util/AsyncFSStorageAgent.java        | 50 +++++++++++++++-----
 1 file changed, 39 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/be4af0af/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java b/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
index d5de61c..f6077a7 100644
--- a/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
+++ b/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
@@ -15,19 +15,16 @@
  */
 package com.datatorrent.common.util;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.ObjectStreamException;
+import java.io.*;
+import java.util.EnumSet;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Options;
-import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.IOUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.netlet.util.DTThrowable;
 public class AsyncFSStorageAgent extends FSStorageAgent
 {
   private final transient FileSystem fs;
@@ -85,9 +82,40 @@ public class AsyncFSStorageAgent extends FSStorageAgent
     String operatorIdStr = String.valueOf(operatorId);
     File directory = new File(localBasePath, operatorIdStr);
     String window = Long.toHexString(windowId);
-    Path lPath = new Path(path + Path.SEPARATOR + operatorIdStr + Path.SEPARATOR + System.currentTimeMillis() + TMP_FILE);
-    FileUtil.copy(new File(directory, String.valueOf(windowId)), fs, lPath, true, conf);
-    fileContext.rename(lPath, new Path(path + Path.SEPARATOR + operatorIdStr + Path.SEPARATOR + window), Options.Rename.OVERWRITE);
+    Path lPath = new Path(path + Path.SEPARATOR + operatorIdStr + Path.SEPARATOR + TMP_FILE);
+    File srcFile = new File(directory, String.valueOf(windowId));
+    FSDataOutputStream stream = null;
+    boolean stateSaved = false;
+    try {
+      // Create the temporary file with OverWrite option to avoid dangling lease issue and avoid exception if file already exists
+      stream = fileContext.create(lPath, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE), Options.CreateOpts.CreateParent.createParent());
+      InputStream in = null;
+      try {
+        in = new FileInputStream(srcFile);
+        IOUtils.copyBytes(in, stream, conf, false);
+      } finally {
+        IOUtils.closeStream(in);
+      }
+      stateSaved = true;
+    } catch (Throwable t) {
+      logger.debug("while saving {} {}", operatorId, window, t);
+      stateSaved = false;
+      DTThrowable.rethrow(t);
+    } finally {
+      try {
+        if (stream != null) {
+          stream.close();
+        }
+      } catch (IOException ie) {
+        stateSaved = false;
+        throw new RuntimeException(ie);
+      } finally {
+        if (stateSaved) {
+          fileContext.rename(lPath, new Path(path + Path.SEPARATOR + operatorIdStr + Path.SEPARATOR + window), Options.Rename.OVERWRITE);
+        }
+        FileUtil.fullyDelete(srcFile);
+      }
+    }
   }
 
   @Override


[24/50] incubator-apex-core git commit: Merge pull request #122 from chandnisingh/APEX-41-3.1.0

Posted by ch...@apache.org.
Merge pull request #122 from chandnisingh/APEX-41-3.1.0

deleting the test folder quietly

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/d813963f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/d813963f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/d813963f

Branch: refs/heads/master
Commit: d813963f3bc03e9393f6b3a890a035d47df796d4
Parents: b2a606b d57e077
Author: David Yan <da...@datatorrent.com>
Authored: Mon Aug 10 14:30:35 2015 -0700
Committer: David Yan <da...@datatorrent.com>
Committed: Mon Aug 10 14:30:35 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/engine/AutoMetricTest.java | 10 ----------
 .../com/datatorrent/stram/support/StramTestSupport.java   |  7 +------
 2 files changed, 1 insertion(+), 16 deletions(-)
----------------------------------------------------------------------



[13/50] incubator-apex-core git commit: Merge pull request #112 from davidyan74/SPOI-5821

Posted by ch...@apache.org.
Merge pull request #112 from davidyan74/SPOI-5821

SPOI-5821 #resolve Removed misleading warning from CLI since now it's…

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/c5d819bd
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/c5d819bd
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/c5d819bd

Branch: refs/heads/master
Commit: c5d819bdd35752c2232d2d1de4741bc3222e3ec6
Parents: fe5d035 453e820
Author: Thomas Weise <th...@gmail.com>
Authored: Thu Aug 6 16:28:29 2015 -0700
Committer: Thomas Weise <th...@gmail.com>
Committed: Thu Aug 6 16:28:29 2015 -0700

----------------------------------------------------------------------
 engine/src/main/java/com/datatorrent/stram/cli/DTCli.java | 3 ---
 1 file changed, 3 deletions(-)
----------------------------------------------------------------------



[42/50] incubator-apex-core git commit: Merge pull request #135 from tweise/APEX-56

Posted by ch...@apache.org.
Merge pull request #135 from tweise/APEX-56

Remove terminated operators from plan after window is committed.

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/a2f9d2e3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/a2f9d2e3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/a2f9d2e3

Branch: refs/heads/master
Commit: a2f9d2e30760c1a258c90891d6834ec1c4f9a606
Parents: ffedce9 064edf0
Author: gauravgopi123 <ga...@datatorrent.com>
Authored: Fri Aug 21 14:22:57 2015 -0700
Committer: gauravgopi123 <ga...@datatorrent.com>
Committed: Fri Aug 21 14:22:57 2015 -0700

----------------------------------------------------------------------
 .../common/util/AsyncFSStorageAgent.java        |  1 +
 .../java/com/datatorrent/stram/StramClient.java |  1 -
 .../stram/StreamingContainerManager.java        | 49 ++++++++----
 .../stram/plan/physical/PhysicalPlan.java       | 32 +++++---
 .../com/datatorrent/stram/MockContainer.java    |  2 +-
 .../com/datatorrent/stram/StreamCodecTest.java  | 35 +--------
 .../stram/StreamingContainerManagerTest.java    | 83 +++++++++++++++++++-
 .../stram/engine/AtMostOnceTest.java            |  2 +-
 .../stram/engine/ProcessingModeTests.java       | 16 ++--
 .../stram/engine/RecoverableInputOperator.java  | 10 +--
 10 files changed, 152 insertions(+), 79 deletions(-)
----------------------------------------------------------------------



[27/50] incubator-apex-core git commit: Merge pull request #125 from tweise/v3.1.0

Posted by ch...@apache.org.
Merge pull request #125 from tweise/v3.1.0

Rethrow exception so compiler (and others) know about it.

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/8a13585b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/8a13585b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/8a13585b

Branch: refs/heads/master
Commit: 8a13585b38ecbe4f75f03098187ee5c5a719b35b
Parents: d813963 39ef1cf
Author: David Yan <da...@datatorrent.com>
Authored: Tue Aug 11 14:54:45 2015 -0700
Committer: David Yan <da...@datatorrent.com>
Committed: Tue Aug 11 14:54:45 2015 -0700

----------------------------------------------------------------------
 .../main/java/com/datatorrent/stram/StreamingContainerManager.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------



[04/50] incubator-apex-core git commit: APEX-29 #resolve Use DefaultEventLoop.createEventLoop factory

Posted by ch...@apache.org.
APEX-29 #resolve Use DefaultEventLoop.createEventLoop factory


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/8ae64ab6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/8ae64ab6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/8ae64ab6

Branch: refs/heads/master
Commit: 8ae64ab64337c8c259b80661bfe125b64e417c93
Parents: 66a75e0
Author: Vlad Rozov <v....@datatorrent.com>
Authored: Sun Aug 2 11:32:09 2015 -0700
Committer: Vlad Rozov <v....@datatorrent.com>
Committed: Tue Aug 4 14:07:04 2015 -0700

----------------------------------------------------------------------
 api/pom.xml                                                 | 2 +-
 .../java/com/datatorrent/bufferserver/server/Server.java    | 2 +-
 .../main/java/com/datatorrent/bufferserver/util/System.java | 2 +-
 .../com/datatorrent/bufferserver/client/SubscriberTest.java | 4 ++--
 .../com/datatorrent/bufferserver/server/ServerTest.java     | 4 ++--
 .../datatorrent/bufferserver/storage/DiskStorageTest.java   | 9 +++++----
 .../com/datatorrent/stram/engine/StreamingContainer.java    | 2 +-
 .../java/com/datatorrent/stram/stream/FastPublisher.java    | 2 ++
 .../java/com/datatorrent/stram/stream/FastStreamTest.java   | 6 ++++--
 .../java/com/datatorrent/stram/stream/SocketStreamTest.java | 8 ++++----
 10 files changed, 23 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8ae64ab6/api/pom.xml
----------------------------------------------------------------------
diff --git a/api/pom.xml b/api/pom.xml
index ff3f441..f04f622 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -74,7 +74,7 @@
     <dependency>
       <groupId>${project.groupId}</groupId>
       <artifactId>netlet</artifactId>
-      <version>1.1.0</version>
+      <version>1.2.0-SNAPSHOT</version>
     </dependency>
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8ae64ab6/bufferserver/src/main/java/com/datatorrent/bufferserver/server/Server.java
----------------------------------------------------------------------
diff --git a/bufferserver/src/main/java/com/datatorrent/bufferserver/server/Server.java b/bufferserver/src/main/java/com/datatorrent/bufferserver/server/Server.java
index a8adf08..7fb4823 100644
--- a/bufferserver/src/main/java/com/datatorrent/bufferserver/server/Server.java
+++ b/bufferserver/src/main/java/com/datatorrent/bufferserver/server/Server.java
@@ -144,7 +144,7 @@ public class Server implements ServerListener
       port = 0;
     }
 
-    DefaultEventLoop eventloop = new DefaultEventLoop("alone");
+    DefaultEventLoop eventloop = DefaultEventLoop.createEventLoop("alone");
     eventloop.start(null, port, new Server(port));
     new Thread(eventloop).start();
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8ae64ab6/bufferserver/src/main/java/com/datatorrent/bufferserver/util/System.java
----------------------------------------------------------------------
diff --git a/bufferserver/src/main/java/com/datatorrent/bufferserver/util/System.java b/bufferserver/src/main/java/com/datatorrent/bufferserver/util/System.java
index ff126cb..e9d6528 100644
--- a/bufferserver/src/main/java/com/datatorrent/bufferserver/util/System.java
+++ b/bufferserver/src/main/java/com/datatorrent/bufferserver/util/System.java
@@ -36,7 +36,7 @@ public class System
       DefaultEventLoop el = eventloops.get(identifier);
       if (el == null) {
         try {
-          eventloops.put(identifier, el = new DefaultEventLoop(identifier));
+          eventloops.put(identifier, el = DefaultEventLoop.createEventLoop(identifier));
         }
         catch (IOException io) {
           throw new RuntimeException(io);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8ae64ab6/bufferserver/src/test/java/com/datatorrent/bufferserver/client/SubscriberTest.java
----------------------------------------------------------------------
diff --git a/bufferserver/src/test/java/com/datatorrent/bufferserver/client/SubscriberTest.java b/bufferserver/src/test/java/com/datatorrent/bufferserver/client/SubscriberTest.java
index 3b6b57a..cde4f69 100644
--- a/bufferserver/src/test/java/com/datatorrent/bufferserver/client/SubscriberTest.java
+++ b/bufferserver/src/test/java/com/datatorrent/bufferserver/client/SubscriberTest.java
@@ -52,8 +52,8 @@ public class SubscriberTest
   public static void setupServerAndClients() throws Exception
   {
     try {
-      eventloopServer = new DefaultEventLoop("server");
-      eventloopClient = new DefaultEventLoop("client");
+      eventloopServer = DefaultEventLoop.createEventLoop("server");
+      eventloopClient = DefaultEventLoop.createEventLoop("client");
     }
     catch (IOException ioe) {
       throw new RuntimeException(ioe);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8ae64ab6/bufferserver/src/test/java/com/datatorrent/bufferserver/server/ServerTest.java
----------------------------------------------------------------------
diff --git a/bufferserver/src/test/java/com/datatorrent/bufferserver/server/ServerTest.java b/bufferserver/src/test/java/com/datatorrent/bufferserver/server/ServerTest.java
index 600f18c..de26da8 100644
--- a/bufferserver/src/test/java/com/datatorrent/bufferserver/server/ServerTest.java
+++ b/bufferserver/src/test/java/com/datatorrent/bufferserver/server/ServerTest.java
@@ -58,8 +58,8 @@ public class ServerTest
   public static void setupServerAndClients() throws Exception
   {
     try {
-      eventloopServer = new DefaultEventLoop("server");
-      eventloopClient = new DefaultEventLoop("client");
+      eventloopServer = DefaultEventLoop.createEventLoop("server");
+      eventloopClient = DefaultEventLoop.createEventLoop("client");
     }
     catch (IOException ioe) {
       throw new RuntimeException(ioe);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8ae64ab6/bufferserver/src/test/java/com/datatorrent/bufferserver/storage/DiskStorageTest.java
----------------------------------------------------------------------
diff --git a/bufferserver/src/test/java/com/datatorrent/bufferserver/storage/DiskStorageTest.java b/bufferserver/src/test/java/com/datatorrent/bufferserver/storage/DiskStorageTest.java
index 08dc5b8..dac996a 100644
--- a/bufferserver/src/test/java/com/datatorrent/bufferserver/storage/DiskStorageTest.java
+++ b/bufferserver/src/test/java/com/datatorrent/bufferserver/storage/DiskStorageTest.java
@@ -16,12 +16,10 @@
 package com.datatorrent.bufferserver.storage;
 
 import java.net.InetSocketAddress;
-import static java.lang.Thread.sleep;
 
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
-import static org.testng.Assert.assertEquals;
 
 import com.datatorrent.bufferserver.packet.BeginWindowTuple;
 import com.datatorrent.bufferserver.packet.EndWindowTuple;
@@ -32,6 +30,9 @@ import com.datatorrent.bufferserver.support.Publisher;
 import com.datatorrent.bufferserver.support.Subscriber;
 import com.datatorrent.netlet.DefaultEventLoop;
 
+import static java.lang.Thread.sleep;
+import static org.testng.Assert.assertEquals;
+
 /**
  *
  */
@@ -49,10 +50,10 @@ public class DiskStorageTest
   @BeforeClass
   public static void setupServerAndClients() throws Exception
   {
-    eventloopServer = new DefaultEventLoop("server");
+    eventloopServer = DefaultEventLoop.createEventLoop("server");
     eventloopServer.start();
 
-    eventloopClient = new DefaultEventLoop("client");
+    eventloopClient = DefaultEventLoop.createEventLoop("client");
     eventloopClient.start();
 
     instance = new Server(0, 1024,8);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8ae64ab6/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java b/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
index 35861f1..9db88ee 100644
--- a/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
+++ b/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
@@ -125,7 +125,7 @@ public class StreamingContainer extends YarnContainerMain
 
   static {
     try {
-      eventloop = new DefaultEventLoop("ProcessWideEventLoop");
+      eventloop = DefaultEventLoop.createEventLoop("ProcessWideEventLoop");
     }
     catch (IOException io) {
       throw new RuntimeException(io);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8ae64ab6/engine/src/main/java/com/datatorrent/stram/stream/FastPublisher.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/stream/FastPublisher.java b/engine/src/main/java/com/datatorrent/stram/stream/FastPublisher.java
index 188fb7a..887c363 100644
--- a/engine/src/main/java/com/datatorrent/stram/stream/FastPublisher.java
+++ b/engine/src/main/java/com/datatorrent/stram/stream/FastPublisher.java
@@ -189,6 +189,7 @@ public class FastPublisher extends Kryo implements ClientListener, Stream
       if (!write) {
         key.interestOps(key.interestOps() | SelectionKey.OP_WRITE);
         write = true;
+        key.selector().wakeup();
       }
     }
   }
@@ -484,6 +485,7 @@ public class FastPublisher extends Kryo implements ClientListener, Stream
     if (!write) {
       key.interestOps(key.interestOps() | SelectionKey.OP_WRITE);
       write = true;
+      key.selector().wakeup();
     }
   }
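
Editorial note: the two wakeup() calls added above address a standard NIO subtlety. Updating a SelectionKey's interest set from a publishing thread does not unblock a selector that is already parked in select(), so the newly requested OP_WRITE might not be serviced until some unrelated event arrives. A simplified sketch of the pattern (WriteInterestScheduler is illustrative, not part of the Apex code):

import java.nio.channels.SelectionKey;

final class WriteInterestScheduler
{
  private boolean write;

  // Request write interest and wake the selector thread so it re-evaluates
  // the interest set immediately instead of waiting for other I/O activity.
  synchronized void scheduleWrite(SelectionKey key)
  {
    if (!write) {
      key.interestOps(key.interestOps() | SelectionKey.OP_WRITE);
      write = true;
      key.selector().wakeup();
    }
  }
}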
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8ae64ab6/engine/src/test/java/com/datatorrent/stram/stream/FastStreamTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/stream/FastStreamTest.java b/engine/src/test/java/com/datatorrent/stram/stream/FastStreamTest.java
index e23358b..c7ed83c 100644
--- a/engine/src/test/java/com/datatorrent/stram/stream/FastStreamTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/stream/FastStreamTest.java
@@ -16,7 +16,7 @@
 package com.datatorrent.stram.stream;
 
 import java.io.IOException;
-import static java.lang.Thread.sleep;
+
 import java.net.InetSocketAddress;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -36,6 +36,8 @@ import com.datatorrent.stram.support.StramTestSupport;
 import com.datatorrent.stram.tuple.EndWindowTuple;
 import com.datatorrent.stram.tuple.Tuple;
 
+import static java.lang.Thread.sleep;
+
 /**
  *
  */
@@ -52,7 +54,7 @@ public class FastStreamTest
 
   static {
     try {
-      eventloop = new DefaultEventLoop("StreamTestEventLoop");
+      eventloop = DefaultEventLoop.createEventLoop("StreamTestEventLoop");
     }
     catch (IOException ex) {
       throw new RuntimeException(ex);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/8ae64ab6/engine/src/test/java/com/datatorrent/stram/stream/SocketStreamTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/stream/SocketStreamTest.java b/engine/src/test/java/com/datatorrent/stram/stream/SocketStreamTest.java
index 7702b85..2cdddc5 100644
--- a/engine/src/test/java/com/datatorrent/stram/stream/SocketStreamTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/stream/SocketStreamTest.java
@@ -19,8 +19,6 @@ import com.datatorrent.stram.codec.DefaultStatefulStreamCodec;
 import com.datatorrent.stram.engine.StreamContext;
 import com.datatorrent.stram.engine.SweepableReservoir;
 import com.datatorrent.stram.support.StramTestSupport;
-import com.datatorrent.stram.stream.BufferServerPublisher;
-import com.datatorrent.stram.stream.BufferServerSubscriber;
 import com.datatorrent.stram.tuple.EndWindowTuple;
 import com.datatorrent.stram.tuple.Tuple;
 import com.datatorrent.api.Sink;
@@ -29,13 +27,15 @@ import com.datatorrent.bufferserver.server.Server;
 import com.datatorrent.netlet.DefaultEventLoop;
 import com.datatorrent.netlet.EventLoop;
 import java.io.IOException;
-import static java.lang.Thread.sleep;
+
 import java.net.InetSocketAddress;
 import java.util.concurrent.atomic.AtomicInteger;
 import org.junit.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.lang.Thread.sleep;
+
 /**
  *
  */
@@ -49,7 +49,7 @@ public class SocketStreamTest
 
   static {
     try {
-      eventloop = new DefaultEventLoop("StreamTestEventLoop");
+      eventloop = DefaultEventLoop.createEventLoop("StreamTestEventLoop");
     }
     catch (IOException ex) {
       throw new RuntimeException(ex);


[26/50] incubator-apex-core git commit: Remove warnings and obsolete internal attributes.

Posted by ch...@apache.org.
Remove warnings and obsolete internal attributes.


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/485c9ac8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/485c9ac8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/485c9ac8

Branch: refs/heads/master
Commit: 485c9ac86735bb775589a1dcfc3e9c716a167c39
Parents: 39ef1cf
Author: thomas <th...@datatorrent.com>
Authored: Tue Aug 11 10:51:07 2015 -0700
Committer: thomas <th...@datatorrent.com>
Committed: Tue Aug 11 10:51:07 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/plan/logical/LogicalPlan.java     | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/485c9ac8/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
index 9bfc2bd..d140d17 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
@@ -49,7 +49,6 @@ import com.datatorrent.common.metric.MetricsAggregator;
 import com.datatorrent.common.metric.SingleMetricAggregator;
 import com.datatorrent.common.metric.sum.DoubleSumAggregator;
 import com.datatorrent.common.metric.sum.LongSumAggregator;
-import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.common.util.FSStorageAgent;
 import com.datatorrent.stram.engine.DefaultUnifier;
 import com.datatorrent.stram.engine.Slider;
@@ -103,8 +102,6 @@ public class LogicalPlan implements Serializable, DAG
   public static Attribute<Long> RM_TOKEN_LIFE_TIME = new Attribute<Long>(YarnConfiguration.DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT);
   public static Attribute<String> KEY_TAB_FILE = new Attribute<String>((String) null, new StringCodec.String2String());
   public static Attribute<Double> TOKEN_REFRESH_ANTICIPATORY_FACTOR = new Attribute<Double>(0.7);
-  public static Attribute<String> LICENSE = new Attribute<String>((String) null, new StringCodec.String2String());
-  public static Attribute<String> LICENSE_ROOT = new Attribute<String>((String) null, new StringCodec.String2String());
   /**
    * Comma separated list of jar file dependencies to be deployed with the application.
    * The launcher will combine the list with built-in dependencies and those specified
@@ -897,6 +894,7 @@ public class LogicalPlan implements Serializable, DAG
   }
 
   @Override
+  @SuppressWarnings("unchecked")
   public <T> StreamMeta addStream(String id, Operator.OutputPort<? extends T> source, Operator.InputPort<? super T>... sinks)
   {
     StreamMeta s = addStream(id);
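
Editorial note: a likely reason for the added @SuppressWarnings("unchecked") is that a parameterized vararg such as Operator.InputPort<? super T>... makes javac emit an unchecked heap-pollution warning at the declaration, and annotating the method is the conventional way to acknowledge it. An illustrative, self-contained sketch (VarargsWarningDemo is hypothetical, not the LogicalPlan code):

import java.util.ArrayList;
import java.util.List;

final class VarargsWarningDemo
{
  // Without the annotation, javac warns: "Possible heap pollution from parameterized vararg type T".
  @SuppressWarnings("unchecked")
  static <T> List<T> collect(T... items)
  {
    List<T> result = new ArrayList<>();
    for (T item : items) {
      result.add(item);
    }
    return result;
  }
}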


[11/50] incubator-apex-core git commit: Deprecated name property from BaseOperator

Posted by ch...@apache.org.
Deprecated name property from BaseOperator


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/fe5d0356
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/fe5d0356
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/fe5d0356

Branch: refs/heads/master
Commit: fe5d03560804a151e4c415939f3ca7edbd686b15
Parents: 19d6658
Author: Timothy Farkas <ti...@datatorrent.com>
Authored: Mon Aug 3 14:16:52 2015 -0700
Committer: Timothy Farkas <ti...@datatorrent.com>
Committed: Thu Aug 6 09:37:59 2015 -0700

----------------------------------------------------------------------
 .../datatorrent/common/util/BaseOperator.java   |  2 +
 engine/pom.xml                                  | 10 ++-
 .../stram/plan/logical/LogicalPlan.java         |  6 +-
 .../com/datatorrent/stram/StreamCodecTest.java  | 45 +++++------
 .../stram/StreamingContainerManagerTest.java    | 12 +--
 .../datatorrent/stram/plan/LogicalPlanTest.java | 16 ++--
 .../stram/plan/physical/PhysicalPlanTest.java   |  9 +--
 .../stram/webapp/OperatorDiscoveryTest.java     | 78 ++++++++++----------
 8 files changed, 91 insertions(+), 87 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/fe5d0356/common/src/main/java/com/datatorrent/common/util/BaseOperator.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/com/datatorrent/common/util/BaseOperator.java b/common/src/main/java/com/datatorrent/common/util/BaseOperator.java
index 0c2f8b3..f653d14 100644
--- a/common/src/main/java/com/datatorrent/common/util/BaseOperator.java
+++ b/common/src/main/java/com/datatorrent/common/util/BaseOperator.java
@@ -33,6 +33,7 @@ public class BaseOperator implements Operator
   /**
    * @return the name property of the operator.
    */
+  @Deprecated
   public String getName()
   {
     return name;
@@ -43,6 +44,7 @@ public class BaseOperator implements Operator
    *
    * @param name
    */
+  @Deprecated
   public void setName(String name)
   {
     this.name = name;
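
Editorial note: with both accessors now deprecated, and addOperator() no longer pushing the assigned name into the instance (see the LogicalPlan hunk below), callers resolve an operator's logical name through the DAG metadata instead, as the test updates later in this commit illustrate. A small hypothetical helper based only on the API visible in these diffs:

import com.datatorrent.api.Operator;
import com.datatorrent.stram.plan.logical.LogicalPlan;

final class OperatorNames
{
  // Replacement for the deprecated BaseOperator.getName(): the logical name is owned by the DAG.
  static String logicalName(LogicalPlan dag, Operator operator)
  {
    return dag.getMeta(operator).getName();
  }
}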

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/fe5d0356/engine/pom.xml
----------------------------------------------------------------------
diff --git a/engine/pom.xml b/engine/pom.xml
index 1e6a7ed..c91265c 100644
--- a/engine/pom.xml
+++ b/engine/pom.xml
@@ -229,9 +229,15 @@
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
-      <version>1.8.5</version>
+      <artifactId>mockito-core</artifactId>
+      <version>1.10.19</version>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.hamcrest</groupId>
+          <artifactId>hamcrest-core</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>net.lingala.zip4j</groupId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/fe5d0356/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
index fc182cd..9bfc2bd 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
@@ -854,10 +854,6 @@ public class LogicalPlan implements Serializable, DAG
   @Override
   public <T extends Operator> T addOperator(String name, T operator)
   {
-    // TODO: optional interface to provide contextual information to instance
-    if (operator instanceof BaseOperator) {
-      ((BaseOperator)operator).setName(name);
-    }
     if (operators.containsKey(name)) {
       if (operators.get(name) == (Object)operator) {
         return operator;
@@ -1219,7 +1215,7 @@ public class LogicalPlan implements Serializable, DAG
       }
     }
 
-    // Validate root operators are input operators 
+    // Validate root operators are input operators
     for (OperatorMeta om : this.rootOperators) {
       if (!(om.getOperator() instanceof InputOperator)) {
         throw new ValidationException(String.format("Root operator: %s is not a Input operator",

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/fe5d0356/engine/src/test/java/com/datatorrent/stram/StreamCodecTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/StreamCodecTest.java b/engine/src/test/java/com/datatorrent/stram/StreamCodecTest.java
index 046425f..9726e65 100644
--- a/engine/src/test/java/com/datatorrent/stram/StreamCodecTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/StreamCodecTest.java
@@ -73,14 +73,14 @@ public class StreamCodecTest
     LogicalPlan.OperatorMeta n2meta = dag.getMeta(node2);
     LogicalPlan.OperatorMeta n3meta = dag.getMeta(node3);
 
-    OperatorDeployInfo n1di = getSingleOperatorDeployInfo(node1, node1.getName(), dnm);
+    OperatorDeployInfo n1di = getSingleOperatorDeployInfo(node1, dnm);
 
     OperatorDeployInfo.OutputDeployInfo n1odi = getOutputDeployInfo(n1di, n1meta.getMeta(node1.outport1));
     String id = n1meta.getName() + " " + n1odi.portName;
     Assert.assertEquals("number stream codecs " + id, n1odi.streamCodecs.size(), 1);
     Assert.assertTrue("No user set stream codec", n1odi.streamCodecs.containsValue(null));
 
-    OperatorDeployInfo n2di = getSingleOperatorDeployInfo(node2, node2.getName(), dnm);
+    OperatorDeployInfo n2di = getSingleOperatorDeployInfo(node2, dnm);
 
     OperatorDeployInfo.InputDeployInfo n2idi = getInputDeployInfo(n2di, n2meta.getMeta(node2.inport1));
     id = n2meta.getName() + " " + n2idi.portName;
@@ -93,7 +93,7 @@ public class StreamCodecTest
     checkPresentStreamCodec(n3meta, node3.inport1, n2odi.streamCodecs, id, plan);
 
 
-    OperatorDeployInfo n3di = getSingleOperatorDeployInfo(node3, node3.getName(), dnm);
+    OperatorDeployInfo n3di = getSingleOperatorDeployInfo(node3, dnm);
 
     OperatorDeployInfo.InputDeployInfo n3idi = getInputDeployInfo(n3di, n3meta.getMeta(node3.inport1));
     id = n3meta.getName() + " " + n3idi.portName;
@@ -139,12 +139,12 @@ public class StreamCodecTest
       StreamingContainerManagerTest.assignContainer(dnm, "container" + (i + 1));
     }
 
-    getSingleOperatorDeployInfo(node1, node1.getName(), dnm);
-    getSingleOperatorDeployInfo(node2, node2.getName(), dnm);
-    getSingleOperatorDeployInfo(node3, node3.getName(), dnm);
-    getSingleOperatorDeployInfo(node4, node4.getName(), dnm);
-    getSingleOperatorDeployInfo(node5, node5.getName(), dnm);
-    getSingleOperatorDeployInfo(node6, node6.getName(), dnm);
+    getSingleOperatorDeployInfo(node1, dnm);
+    getSingleOperatorDeployInfo(node2, dnm);
+    getSingleOperatorDeployInfo(node3, dnm);
+    getSingleOperatorDeployInfo(node4, dnm);
+    getSingleOperatorDeployInfo(node5, dnm);
+    getSingleOperatorDeployInfo(node6, dnm);
     Assert.assertEquals("number of stream codec identifiers", 3, plan.getStreamCodecIdentifiers().size());
   }
 
@@ -180,14 +180,14 @@ public class StreamCodecTest
     LogicalPlan.OperatorMeta n2meta = dag.getMeta(node2);
     LogicalPlan.OperatorMeta n3meta = dag.getMeta(node3);
 
-    OperatorDeployInfo n1di = getSingleOperatorDeployInfo(node1, node1.getName(), dnm);
+    OperatorDeployInfo n1di = getSingleOperatorDeployInfo(node1, dnm);
 
     OperatorDeployInfo.OutputDeployInfo n1odi = getOutputDeployInfo(n1di, n1meta.getMeta(node1.outport1));
     String id = n1meta.getName() + " " + n1odi.portName;
     Assert.assertEquals("number stream codecs " + id, n1odi.streamCodecs.size(), 1);
     checkPresentStreamCodec(n2meta, node2.inportWithCodec, n1odi.streamCodecs, id, plan);
 
-    OperatorDeployInfo n2di = getSingleOperatorDeployInfo(node2, node2.getName(), dnm);
+    OperatorDeployInfo n2di = getSingleOperatorDeployInfo(node2, dnm);
 
     OperatorDeployInfo.InputDeployInfo n2idi = getInputDeployInfo(n2di, n2meta.getMeta(node2.inportWithCodec));
     id = n2meta.getName() + " " + n2idi.portName;
@@ -199,7 +199,7 @@ public class StreamCodecTest
     Assert.assertEquals("number stream codecs " + id, n2odi.streamCodecs.size(), 1);
     checkPresentStreamCodec(n3meta, node3.inportWithCodec, n2odi.streamCodecs, id, plan);
 
-    OperatorDeployInfo n3di = getSingleOperatorDeployInfo(node3, node3.getName(), dnm);
+    OperatorDeployInfo n3di = getSingleOperatorDeployInfo(node3, dnm);
 
     OperatorDeployInfo.InputDeployInfo n3idi = getInputDeployInfo(n3di, n3meta.getMeta(node3.inportWithCodec));
     id = n3meta.getName() + " " + n3idi.portName;
@@ -238,7 +238,7 @@ public class StreamCodecTest
     LogicalPlan.OperatorMeta n1meta = dag.getMeta(node1);
     LogicalPlan.OperatorMeta n2meta = dag.getMeta(node2);
 
-    OperatorDeployInfo n1di = getSingleOperatorDeployInfo(node1, node1.getName(), dnm);
+    OperatorDeployInfo n1di = getSingleOperatorDeployInfo(node1, dnm);
 
     OperatorDeployInfo.OutputDeployInfo n1odi = getOutputDeployInfo(n1di, n1meta.getMeta(node1.outport1));
     String id = n1meta.getName() + " " + n1odi.portName;
@@ -449,21 +449,21 @@ public class StreamCodecTest
     LogicalPlan.OperatorMeta n2meta = dag.getMeta(node2);
     LogicalPlan.OperatorMeta n3meta = dag.getMeta(node3);
 
-    OperatorDeployInfo n1di = getSingleOperatorDeployInfo(node1, node1.getName(), dnm);
+    OperatorDeployInfo n1di = getSingleOperatorDeployInfo(node1, dnm);
 
     OperatorDeployInfo.OutputDeployInfo n1odi = getOutputDeployInfo(n1di, n1meta.getMeta(node1.outport1));
     String id = n1meta.getName() + " " + n1odi.portName;
     Assert.assertEquals("number stream codecs " + id, n1odi.streamCodecs.size(), 1);
     checkPresentStreamCodec(n2meta, node2.inport1, n1odi.streamCodecs, id, plan);
 
-    OperatorDeployInfo n2di = getSingleOperatorDeployInfo(node2, node2.getName(), dnm);
+    OperatorDeployInfo n2di = getSingleOperatorDeployInfo(node2, dnm);
 
     OperatorDeployInfo.InputDeployInfo n2idi = getInputDeployInfo(n2di, n2meta.getMeta(node2.inport1));
     id = n2meta.getName() + " " + n2idi.portName;
     Assert.assertEquals("number stream codecs " + id, n2idi.streamCodecs.size(), 1);
     checkPresentStreamCodec(n2meta, node2.inport1, n2idi.streamCodecs, id, plan);
 
-    OperatorDeployInfo n3di = getSingleOperatorDeployInfo(node3, node3.getName(), dnm);
+    OperatorDeployInfo n3di = getSingleOperatorDeployInfo(node3, dnm);
 
     OperatorDeployInfo.InputDeployInfo n3idi = getInputDeployInfo(n3di, n3meta.getMeta(node3.inport1));
     id = n3meta.getName() + " " + n3idi.portName;
@@ -584,7 +584,7 @@ public class StreamCodecTest
     LogicalPlan.OperatorMeta n2meta = dag.getMeta(node2);
     LogicalPlan.OperatorMeta n3meta = dag.getMeta(node3);
 
-    OperatorDeployInfo n1di = getSingleOperatorDeployInfo(node1, node1.getName(), dnm);
+    OperatorDeployInfo n1di = getSingleOperatorDeployInfo(node1, dnm);
 
     OperatorDeployInfo.OutputDeployInfo n1odi = getOutputDeployInfo(n1di, n1meta.getMeta(node1.outport1));
     String id = n1meta.getName() + " " + n1odi.portName;
@@ -592,14 +592,14 @@ public class StreamCodecTest
     checkPresentStreamCodec(n2meta, node2.inport1, n1odi.streamCodecs, id, plan);
     checkPresentStreamCodec(n3meta, node3.inport1, n1odi.streamCodecs, id, plan);
 
-    OperatorDeployInfo n2di = getSingleOperatorDeployInfo(node2, node2.getName(), dnm);
+    OperatorDeployInfo n2di = getSingleOperatorDeployInfo(node2, dnm);
 
     OperatorDeployInfo.InputDeployInfo n2idi = getInputDeployInfo(n2di, n2meta.getMeta(node2.inport1));
     id = n2meta.getName() + " " + n2idi.portName;
     Assert.assertEquals("number stream codecs " + id, n2idi.streamCodecs.size(), 1);
     checkPresentStreamCodec(n2meta, node2.inport1, n2idi.streamCodecs, id, plan);
 
-    OperatorDeployInfo n3di = getSingleOperatorDeployInfo(node3, node3.getName(), dnm);
+    OperatorDeployInfo n3di = getSingleOperatorDeployInfo(node3, dnm);
 
     OperatorDeployInfo.InputDeployInfo n3idi = getInputDeployInfo(n3di, n3meta.getMeta(node3.inport1));
     id = n3meta.getName() + " " + n3idi.portName;
@@ -855,14 +855,14 @@ public class StreamCodecTest
 
     Assert.assertNotNull("non inline operator is null", nonInlineOperator);
 
-    OperatorDeployInfo n1di = getSingleOperatorDeployInfo(node1, node1.getName(), dnm);
+    OperatorDeployInfo n1di = getSingleOperatorDeployInfo(node1, dnm);
 
     OperatorDeployInfo.OutputDeployInfo n1odi = getOutputDeployInfo(n1di, n1meta.getMeta(node1.outport1));
     String id = n1meta.getName() + " " + n1odi.portName;
     Assert.assertEquals("number stream codecs " + id, n1odi.streamCodecs.size(), 1);
     checkPresentStreamCodec(nonInlineMeta, niInputPort, n1odi.streamCodecs, id, plan);
 
-    OperatorDeployInfo odi = getSingleOperatorDeployInfo(nonInlineOperator, nonInlineOperator.getName(), dnm);
+    OperatorDeployInfo odi = getSingleOperatorDeployInfo(nonInlineOperator, dnm);
 
     OperatorDeployInfo.InputDeployInfo idi = getInputDeployInfo(odi, nonInlineMeta.getMeta(niInputPort));
     id = nonInlineMeta.getName() + " " + idi.portName;
@@ -1218,9 +1218,10 @@ public class StreamCodecTest
     Assert.assertEquals("stream codec not same " + id, opStreamCodecInfo, streamCodecInfo);
   }
 
-  private OperatorDeployInfo getSingleOperatorDeployInfo(Operator oper, String id, StreamingContainerManager scm)
+  private OperatorDeployInfo getSingleOperatorDeployInfo(Operator oper, StreamingContainerManager scm)
   {
     LogicalPlan dag = scm.getLogicalPlan();
+    String id = dag.getMeta(oper).toString();
     PhysicalPlan plan = scm.getPhysicalPlan();
     List<PTOperator> operators = plan.getOperators(dag.getMeta(oper));
     Assert.assertEquals("number of operators " + id, 1, operators.size());

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/fe5d0356/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java b/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
index ba15a78..38a54f0 100644
--- a/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
@@ -188,8 +188,8 @@ public class StreamingContainerManagerTest {
     Assert.assertEquals("number operators assigned to container", 3, c2.size());
     OperatorDeployInfo o2DI = getNodeDeployInfo(c2, dag.getMeta(o2));
     OperatorDeployInfo o3DI = getNodeDeployInfo(c2, dag.getMeta(o3));
-    Assert.assertNotNull(o2.getName() + " assigned to " + sca2.container.getExternalId(), o2DI);
-    Assert.assertNotNull(o3.getName() + " assigned to " + sca2.container.getExternalId(), o3DI);
+    Assert.assertNotNull(dag.getMeta(o2) + " assigned to " + sca2.container.getExternalId(), o2DI);
+    Assert.assertNotNull(dag.getMeta(o3) + " assigned to " + sca2.container.getExternalId(), o3DI);
 
     Assert.assertTrue("The buffer server memory for container 1", 256 == sca1.getInitContext().getValue(ContainerContext.BUFFER_SERVER_MB));
     Assert.assertTrue("The buffer server memory for container 2", 0 == sca2.getInitContext().getValue(ContainerContext.BUFFER_SERVER_MB));
@@ -218,7 +218,7 @@ public class StreamingContainerManagerTest {
 
     // THREAD_LOCAL o4.inport1
     OperatorDeployInfo o4DI = getNodeDeployInfo(c2, dag.getMeta(o4));
-    Assert.assertNotNull(o4.getName() + " assigned to " + sca2.container.getExternalId(), o4DI);
+    Assert.assertNotNull(dag.getMeta(o4) + " assigned to " + sca2.container.getExternalId(), o4DI);
     InputDeployInfo c2o4i1 = getInputDeployInfo(o4DI, "o3.outport1");
     Assert.assertNotNull("input from o3.outport1", c2o4i1);
     Assert.assertEquals("portName " + c2o4i1, GenericTestOperator.IPORT1, c2o4i1.portName);
@@ -271,7 +271,7 @@ public class StreamingContainerManagerTest {
     StreamingContainerAgent sca1 = dnm.getContainerAgent(c.getExternalId());
     List<OperatorDeployInfo> c1 = getDeployInfo(sca1);
     Assert.assertEquals("number operators assigned to container", 1, c1.size());
-    Assert.assertTrue(node2.getName() + " assigned to " + sca1.container.getExternalId(), containsNodeContext(c1, dag.getMeta(node1)));
+    Assert.assertTrue(dag.getMeta(node2) + " assigned to " + sca1.container.getExternalId(), containsNodeContext(c1, dag.getMeta(node1)));
 
     List<PTOperator> o2Partitions = plan.getOperators(dag.getMeta(node2));
     Assert.assertEquals("number partitions", TestStaticPartitioningSerDe.partitions.length, o2Partitions.size());
@@ -280,7 +280,7 @@ public class StreamingContainerManagerTest {
       String containerId = o2Partitions.get(i).getContainer().getExternalId();
       List<OperatorDeployInfo> cc = getDeployInfo(dnm.getContainerAgent(containerId));
       Assert.assertEquals("number operators assigned to container", 1, cc.size());
-      Assert.assertTrue(node2.getName() + " assigned to " + containerId, containsNodeContext(cc, dag.getMeta(node2)));
+      Assert.assertTrue(dag.getMeta(node2) + " assigned to " + containerId, containsNodeContext(cc, dag.getMeta(node2)));
 
       // n1n2 in, mergeStream out
       OperatorDeployInfo ndi = cc.get(0);
@@ -338,7 +338,7 @@ public class StreamingContainerManagerTest {
     Assert.assertEquals("number operators " + cmerge, 1, cmerge.size());
 
     OperatorDeployInfo node3DI = getNodeDeployInfo(cmerge,  dag.getMeta(node3));
-    Assert.assertNotNull(node3.getName() + " assigned", node3DI);
+    Assert.assertNotNull(dag.getMeta(node3) + " assigned", node3DI);
     Assert.assertEquals("inputs " + node3DI, 1, node3DI.inputs.size());
     InputDeployInfo node3In = node3DI.inputs.get(0);
     Assert.assertEquals("streamName " + node3In, n2n3.getName(), node3In.declaredStreamId);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/fe5d0356/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanTest.java b/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanTest.java
index 24a9031..5bda8ee 100644
--- a/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanTest.java
@@ -89,16 +89,16 @@ public class LogicalPlanTest {
      dag.findStronglyConnected(dag.getMeta(operator7), cycles);
      assertEquals("operator self reference", 1, cycles.size());
      assertEquals("operator self reference", 1, cycles.get(0).size());
-     assertEquals("operator self reference", operator7.getName(), cycles.get(0).get(0));
+     assertEquals("operator self reference", dag.getMeta(operator7).getName(), cycles.get(0).get(0));
 
      // 3 operator cycle
      cycles.clear();
      dag.findStronglyConnected(dag.getMeta(operator4), cycles);
      assertEquals("3 operator cycle", 1, cycles.size());
      assertEquals("3 operator cycle", 3, cycles.get(0).size());
-     assertTrue("operator2", cycles.get(0).contains(operator2.getName()));
-     assertTrue("operator3", cycles.get(0).contains(operator3.getName()));
-     assertTrue("operator4", cycles.get(0).contains(operator4.getName()));
+     assertTrue("operator2", cycles.get(0).contains(dag.getMeta(operator2).getName()));
+     assertTrue("operator3", cycles.get(0).contains(dag.getMeta(operator3).getName()));
+     assertTrue("operator4", cycles.get(0).contains(dag.getMeta(operator4).getName()));
 
      try {
        dag.validate();
@@ -294,7 +294,7 @@ public class LogicalPlanTest {
       Assert.fail("should throw ConstraintViolationException");
     } catch (ConstraintViolationException e) {
       Assert.assertEquals("violation details", constraintViolations, e.getConstraintViolations());
-      String expRegex = ".*ValidationTestOperator\\{name=testOperator}, propertyPath='intField1', message='must be greater than or equal to 2',.*value=1}]";
+      String expRegex = ".*ValidationTestOperator\\{name=null}, propertyPath='intField1', message='must be greater than or equal to 2',.*value=1}]";
       Assert.assertThat("exception message", e.getMessage(), RegexMatcher.matches(expRegex));
     }
 
@@ -396,7 +396,7 @@ public class LogicalPlanTest {
       dag.validate();
       Assert.fail("should raise operator is not partitionable for operator1");
     } catch (ValidationException e) {
-      Assert.assertEquals("", "Operator " + operator.getName() + " provides partitioning capabilities but the annotation on the operator class declares it non partitionable!", e.getMessage());
+      Assert.assertEquals("", "Operator " + dag.getMeta(operator).getName() + " provides partitioning capabilities but the annotation on the operator class declares it non partitionable!", e.getMessage());
     }
 
     dag.setAttribute(operator, OperatorContext.PARTITIONER, null);
@@ -406,7 +406,7 @@ public class LogicalPlanTest {
       dag.validate();
       Assert.fail("should raise operator is not partitionable for operator1");
     } catch (ValidationException e) {
-      Assert.assertEquals("", "Operator " + operator.getName() + " is not partitionable but PARTITION_PARALLEL attribute is set", e.getMessage());
+      Assert.assertEquals("", "Operator " + dag.getMeta(operator).getName() + " is not partitionable but PARTITION_PARALLEL attribute is set", e.getMessage());
     }
 
     dag.setInputPortAttribute(operator.input1, PortContext.PARTITION_PARALLEL, false);
@@ -419,7 +419,7 @@ public class LogicalPlanTest {
       dag.validate();
       Assert.fail("should raise operator is not partitionable for operator2");
     } catch (ValidationException e) {
-      Assert.assertEquals("Operator " + operator2.getName() + " provides partitioning capabilities but the annotation on the operator class declares it non partitionable!", e.getMessage());
+      Assert.assertEquals("Operator " + dag.getMeta(operator2).getName() + " provides partitioning capabilities but the annotation on the operator class declares it non partitionable!", e.getMessage());
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/fe5d0356/engine/src/test/java/com/datatorrent/stram/plan/physical/PhysicalPlanTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/plan/physical/PhysicalPlanTest.java b/engine/src/test/java/com/datatorrent/stram/plan/physical/PhysicalPlanTest.java
index 9382a4b..ccf930f 100644
--- a/engine/src/test/java/com/datatorrent/stram/plan/physical/PhysicalPlanTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/plan/physical/PhysicalPlanTest.java
@@ -180,7 +180,6 @@ public class PhysicalPlanTest
         p.getPartitionKeys().put(this.inport1, lpks);
         p.getPartitionKeys().put(this.inportWithCodec, lpks);
         p.getPartitionedInstance().pks = p.getPartitionKeys().values().toString();
-        p.getPartitionedInstance().setName(p.getPartitionKeys().values().toString());
         newPartitions.add(p);
       }
 
@@ -252,7 +251,7 @@ public class PhysicalPlanTest
     dag.addStream("node1.outport1", node1.outport1, node2.inport2, node2.inport1);
 
     int initialPartitionCount = 5;
-    OperatorMeta node2Decl = dag.getOperatorMeta(node2.getName());
+    OperatorMeta node2Decl = dag.getMeta(node2);
     node2Decl.getAttributes().put(OperatorContext.PARTITIONER, new StatelessPartitioner<GenericTestOperator>(initialPartitionCount));
 
     PhysicalPlan plan = new PhysicalPlan(dag, new TestPlanContext());
@@ -350,7 +349,7 @@ public class PhysicalPlanTest
 
     dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 2);
 
-    OperatorMeta o2Meta = dag.getOperatorMeta(o2.getName());
+    OperatorMeta o2Meta = dag.getMeta(o2);
     o2Meta.getAttributes().put(OperatorContext.STATS_LISTENERS,
                                Lists.newArrayList((StatsListener) new PartitionLoadWatch(0, 5)));
     o2Meta.getAttributes().put(OperatorContext.PARTITIONER, new StatelessPartitioner<GenericTestOperator>(1));
@@ -441,7 +440,7 @@ public class PhysicalPlanTest
   public void testInputOperatorPartitioning() {
     LogicalPlan dag = new LogicalPlan();
     TestInputOperator<Object> o1 = dag.addOperator("o1", new TestInputOperator<Object>());
-    OperatorMeta o1Meta = dag.getOperatorMeta(o1.getName());
+    OperatorMeta o1Meta = dag.getMeta(o1);
     dag.setAttribute(o1, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{new PartitioningTest.PartitionLoadWatch()}));
     dag.setAttribute(o1, OperatorContext.PARTITIONER, new StatelessPartitioner<TestInputOperator<Object>>(2));
 
@@ -509,7 +508,7 @@ public class PhysicalPlanTest
 
     dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 2);
 
-    OperatorMeta node2Meta = dag.getOperatorMeta(o2.getName());
+    OperatorMeta node2Meta = dag.getMeta(o2);
     node2Meta.getAttributes().put(OperatorContext.STATS_LISTENERS,
                                   Lists.newArrayList((StatsListener) new PartitionLoadWatch(3, 5)));
     node2Meta.getAttributes().put(OperatorContext.PARTITIONER, new StatelessPartitioner<GenericTestOperator>(8));

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/fe5d0356/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java b/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
index ad915c8..8baa08a 100644
--- a/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
@@ -57,7 +57,7 @@ import com.google.common.collect.Lists;
 public class OperatorDiscoveryTest
 {
 //  private static final Logger LOG = LoggerFactory.getLogger(OperatorDiscoveryTest.class);
-  
+
   public static class GenericClassBase<T> extends BaseOperator
   {
     private int A;
@@ -271,7 +271,7 @@ public class OperatorDiscoveryTest
   @Test
   public void testPropertyDiscovery() throws Exception
   {
-    
+
     String[] classFilePath = getClassFileInClasspath();
     OperatorDiscoverer od = new OperatorDiscoverer(classFilePath);
     od.buildTypeGraph();
@@ -343,8 +343,8 @@ public class OperatorDiscoveryTest
     props = desc.getJSONArray("properties");
     genericArray = getJSONProperty(props, "genericArray");
     Assert.assertEquals(debug + "type " + genericArray, String[].class.getName(), genericArray.get("type"));
-    
-    
+
+
     // Test complicated Type Variable override in Hierarchy
     desc = od.describeClassByASM(SubSubClass.class.getName());
     props = desc.getJSONArray("properties");
@@ -554,9 +554,9 @@ public class OperatorDiscoveryTest
         return false;
       return true;
     }
-    
-    
-    
+
+
+
 
   }
 
@@ -573,7 +573,7 @@ public class OperatorDiscoveryTest
     private long longProp;
     private double doubleProp;
     private boolean booleanProp;
-    
+
     private Integer integerProp;
     private List<String> stringList;
     private List<Structured> nestedList;
@@ -612,39 +612,39 @@ public class OperatorDiscoveryTest
     {
       return mProp;
     }
-    
+
     public String getAlias()
     {
       return realName;
     }
-    
+
     public void setAlias(String alias)
     {
       realName = alias;
     }
-    
+
     public String getGetterOnly()
     {
       return getterOnly;
     }
-    
-    
+
+
     public URI getUri()
     {
       return uri;
     }
-    
+
     public void setUri(URI uri)
     {
       this.uri = uri;
     }
-    
-    
+
+
     public void setIntegerProp(Integer integerProp)
     {
       this.integerProp = integerProp;
     }
-    
+
     public Integer getIntegerProp()
     {
       return integerProp;
@@ -734,7 +734,7 @@ public class OperatorDiscoveryTest
     {
       return stringArray;
     }
-    
+
     public void setStringArray(String[] stringArray)
     {
       this.stringArray = stringArray;
@@ -858,15 +858,15 @@ public class OperatorDiscoveryTest
   static class ExtendedOperator extends TestOperator<String, Map<String, Number>>
   {
   }
-  
+
   public static class BaseClass<A, B, C>
   {
     private A a;
-    
+
     private B b;
 
     private C c;
-    
+
     public void setA(A a)
     {
       this.a = a;
@@ -875,12 +875,12 @@ public class OperatorDiscoveryTest
     {
       this.b = b;
     }
-    
+
     public A getA()
     {
       return a;
     }
-    
+
     public B getB()
     {
       return b;
@@ -890,7 +890,7 @@ public class OperatorDiscoveryTest
     {
       this.c = c;
     }
-    
+
     public C getC()
     {
       return c;
@@ -900,28 +900,28 @@ public class OperatorDiscoveryTest
   public static class SubClass<D, A extends Number> extends BaseClass<Number, A, D>
   {
     private D d;
-    
+
     public void setD(D d)
     {
       this.d = d;
     }
-    
+
     public D getD()
     {
       return d;
     }
-    
+
   }
 
   public static class SubSubClass<E extends Runnable> extends SubClass<List<String>, Long>
   {
     private E e;
-    
+
     public void setE(E e)
     {
       this.e = e;
     }
-    
+
     public E getE()
     {
       return e;
@@ -975,7 +975,7 @@ public class OperatorDiscoveryTest
     Assert.assertArrayEquals(ah.intArray, clone.intArray);
 
   }
-  
+
   @Test
   public void testLogicalPlanConfiguration() throws Exception
   {
@@ -995,13 +995,13 @@ public class OperatorDiscoveryTest
     ObjectMapper mapper = ObjectMapperFactory.getOperatorValueSerializer();
     String s = mapper.writeValueAsString(bean);
 //    LOG.debug(new JSONObject(s).toString(2));
-    // 
+    //
     Assert.assertTrue("Shouldn't contain field 'realName' !", !s.contains("realName"));
     Assert.assertTrue("Should contain property 'alias' !", s.contains("alias"));
     Assert.assertTrue("Shouldn't contain property 'getterOnly' !", !s.contains("getterOnly"));
     JSONObject jsonObj = new JSONObject(s);
-    
-    // create the json dag representation 
+
+    // create the json dag representation
     JSONObject jsonPlan = new JSONObject();
     jsonPlan.put("streams", new JSONArray());
     JSONObject jsonOper = new JSONObject();
@@ -1009,17 +1009,17 @@ public class OperatorDiscoveryTest
     jsonOper.put("class", TestOperator.class.getName());
     jsonOper.put("properties", jsonObj);
     jsonPlan.put("operators", new JSONArray(Lists.newArrayList(jsonOper)));
-    
-    
+
+
     Configuration conf = new Configuration(false);
     LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(conf);
-    // create logical plan from the json 
+    // create logical plan from the json
     LogicalPlan lp = lpc.createFromJson(jsonPlan, "jsontest");
     OperatorMeta om = lp.getOperatorMeta("Test Operator");
     Assert.assertTrue(om.getOperator() instanceof TestOperator);
     @SuppressWarnings("rawtypes")
     TestOperator beanBack = (TestOperator) om.getOperator();
-    
+
     // The operator deserialized back from json should be same as original operator
     Assert.assertEquals(bean.map, beanBack.map);
     Assert.assertArrayEquals(bean.stringArray, beanBack.stringArray);
@@ -1031,8 +1031,8 @@ public class OperatorDiscoveryTest
     Assert.assertEquals(bean.booleanProp, beanBack.booleanProp);
     Assert.assertEquals(bean.realName, beanBack.realName);
     Assert.assertEquals(bean.getterOnly, beanBack.getterOnly);
-    
-    
+
+
   }
 
   public static class SchemaRequiredOperator extends BaseOperator implements InputOperator


[32/50] incubator-apex-core git commit: Merge branch 'SPOI-5338-3.1.0' of https://github.com/ishark/Apex into ishark-SPOI-5338-3.1.0

Posted by ch...@apache.org.
Merge branch 'SPOI-5338-3.1.0' of https://github.com/ishark/Apex into ishark-SPOI-5338-3.1.0


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/9d08532a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/9d08532a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/9d08532a

Branch: refs/heads/master
Commit: 9d08532a7127bf57d8d0ace45c0a165581d17daf
Parents: 86c86c6 ba46e71
Author: thomas <th...@datatorrent.com>
Authored: Thu Aug 13 21:26:30 2015 -0700
Committer: thomas <th...@datatorrent.com>
Committed: Thu Aug 13 21:26:30 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/cli/DTCli.java   |  18 +-
 .../stram/webapp/OperatorDiscoverer.java        | 204 +++++++++----------
 .../stram/webapp/StramWebServices.java          |   8 +-
 .../com/datatorrent/stram/webapp/TypeGraph.java |  86 ++++++--
 .../stram/webapp/OperatorDiscoveryTest.java     |   9 +-
 5 files changed, 175 insertions(+), 150 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/9d08532a/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/9d08532a/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
----------------------------------------------------------------------
diff --cc engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
index 8d7e346,d0b34c2..fc175af
--- a/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
+++ b/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
@@@ -37,9 -37,8 +37,10 @@@ import org.slf4j.Logger
  import org.slf4j.LoggerFactory;
  
  import com.datatorrent.api.Component;
+ import com.datatorrent.api.InputOperator;
  import com.datatorrent.api.Operator;
 +
 +import com.datatorrent.common.util.BaseOperator;
  import com.datatorrent.netlet.util.DTThrowable;
  import com.datatorrent.stram.webapp.asm.ClassNodeType;
  import com.datatorrent.stram.webapp.asm.ClassSignatureVisitor;

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/9d08532a/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
----------------------------------------------------------------------


[12/50] incubator-apex-core git commit: SPOI-5821 #resolve Removed misleading warning from CLI since now it's possible for users to use it without installing the distro

Posted by ch...@apache.org.
SPOI-5821 #resolve Removed misleading warning from CLI since now it's possible for users to use it without installing the distro


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/453e8207
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/453e8207
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/453e8207

Branch: refs/heads/master
Commit: 453e820724a2111c880a3ed5d410b8af7d7cf51c
Parents: 19d6658
Author: David Yan <da...@datatorrent.com>
Authored: Thu Aug 6 13:56:57 2015 -0700
Committer: David Yan <da...@datatorrent.com>
Committed: Thu Aug 6 13:56:57 2015 -0700

----------------------------------------------------------------------
 engine/src/main/java/com/datatorrent/stram/cli/DTCli.java | 3 ---
 1 file changed, 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/453e8207/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java b/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
index eff2404..e3c29de 100644
--- a/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
+++ b/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
@@ -1481,9 +1481,6 @@ public class DTCli
   private void printWelcomeMessage()
   {
     System.out.println("DT CLI " + VersionInfo.getVersion() + " " + VersionInfo.getDate() + " " + VersionInfo.getRevision());
-    if (!StramClientUtils.configComplete(conf)) {
-      System.err.println("WARNING: Configuration of DataTorrent has not been complete. Please proceed with caution and only in development environment!");
-    }
   }
 
   private void printHelp(String command, CommandSpec commandSpec, PrintStream os)


[50/50] incubator-apex-core git commit: Bring the master up to date with release-3.1.0

Posted by ch...@apache.org.
Bring the master up to date with release-3.1.0


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/d7c8964b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/d7c8964b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/d7c8964b

Branch: refs/heads/master
Commit: d7c8964b7983fc3a8df38fbc85ec5291449d8960
Parents: b7475a0 03f612f
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Wed Sep 9 15:03:26 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Wed Sep 9 15:11:34 2015 -0700

----------------------------------------------------------------------
 CHANGELOG.md                                    | 115 +++++
 RELEASE.md                                      |  71 +++
 apex-app-archetype/README.md                    |   2 +-
 apex-app-archetype/pom.xml                      |   2 +-
 .../projects/basic/archetype.properties         |   2 +-
 apex-conf-archetype/README.md                   |   2 +-
 apex-conf-archetype/pom.xml                     |   2 +-
 .../projects/basic/archetype.properties         |   2 +-
 api/pom.xml                                     |  44 +-
 .../java/com/datatorrent/api/AutoMetric.java    |  11 +-
 .../main/java/com/datatorrent/api/Context.java  |   7 +
 .../annotation/InputPortFieldAnnotation.java    |  10 +-
 .../annotation/OutputPortFieldAnnotation.java   |  10 +
 bufferserver/pom.xml                            |   4 +-
 .../datatorrent/bufferserver/server/Server.java |   2 +-
 .../datatorrent/bufferserver/util/System.java   |   2 +-
 .../bufferserver/client/SubscriberTest.java     |   4 +-
 .../bufferserver/server/ServerTest.java         |   4 +-
 .../bufferserver/storage/DiskStorageTest.java   |   9 +-
 common/pom.xml                                  |  48 +-
 .../common/util/AsyncFSStorageAgent.java        | 143 ++++++
 .../datatorrent/common/util/BaseOperator.java   |   2 +
 .../datatorrent/common/util/FSStorageAgent.java |   5 +-
 .../common/codec/JsonStreamCodecTest.java       |  15 +-
 .../common/util/AsyncFSStorageAgentTest.java    | 133 ++++++
 engine/pom.xml                                  |  12 +-
 .../java/com/datatorrent/stram/StramClient.java |   8 +-
 .../datatorrent/stram/StramLocalCluster.java    |   4 +-
 .../stram/StreamingAppMasterService.java        |   2 +-
 .../stram/StreamingContainerManager.java        | 170 +++++---
 .../java/com/datatorrent/stram/cli/DTCli.java   |  34 +-
 .../java/com/datatorrent/stram/engine/Node.java |  64 ++-
 .../stram/engine/StreamingContainer.java        |   8 +-
 .../stram/plan/logical/LogicalPlan.java         |  30 +-
 .../plan/logical/LogicalPlanConfiguration.java  |  22 +-
 .../stram/plan/physical/PhysicalPlan.java       |  40 +-
 .../datatorrent/stram/stream/FastPublisher.java |   2 +
 .../datatorrent/stram/util/FSJsonLineFile.java  |  24 +-
 .../stram/webapp/OperatorDiscoverer.java        | 434 +++++++++++--------
 .../stram/webapp/StramWebServices.java          |   8 +-
 .../com/datatorrent/stram/webapp/TypeGraph.java | 119 ++++-
 .../com/datatorrent/stram/CheckpointTest.java   |  11 +-
 .../stram/LogicalPlanModificationTest.java      |  22 +-
 .../com/datatorrent/stram/MockContainer.java    |   2 +-
 .../com/datatorrent/stram/PartitioningTest.java |  26 +-
 .../stram/StramLocalClusterTest.java            |  22 +-
 .../datatorrent/stram/StramMiniClusterTest.java |   9 +-
 .../datatorrent/stram/StramRecoveryTest.java    |  58 ++-
 .../com/datatorrent/stram/StreamCodecTest.java  |  80 +---
 .../stram/StreamingContainerManagerTest.java    | 139 +++++-
 .../stram/client/AppPackageTest.java            |   2 +-
 .../stram/debug/TupleRecorderTest.java          |   3 +
 .../stram/engine/AtLeastOnceTest.java           |  16 +
 .../stram/engine/AtMostOnceTest.java            |   2 +-
 .../stram/engine/AutoMetricTest.java            |  12 +-
 .../stram/engine/InputOperatorTest.java         |   5 +-
 .../stram/engine/ProcessingModeTests.java       |  11 +-
 .../stram/engine/RecoverableInputOperator.java  |  10 +-
 .../datatorrent/stram/engine/SliderTest.java    |   5 +
 .../com/datatorrent/stram/engine/StatsTest.java |  10 +-
 .../stram/engine/StreamingContainerTest.java    |   6 +
 .../stram/engine/WindowGeneratorTest.java       |  11 +-
 .../plan/LogicalPlanConfigurationTest.java      |  83 +++-
 .../datatorrent/stram/plan/LogicalPlanTest.java |  16 +-
 .../stram/plan/SchemaTestOperator.java          |  33 ++
 .../stram/plan/physical/PhysicalPlanTest.java   |   9 +-
 .../stram/stream/FastStreamTest.java            |   6 +-
 .../stram/stream/OiOEndWindowTest.java          |   5 +
 .../stram/stream/SocketStreamTest.java          |   8 +-
 .../stram/support/StramTestSupport.java         |   7 +-
 .../stram/webapp/OperatorDiscoveryTest.java     | 163 +++++--
 .../stram/webapp/StramWebServicesTest.java      |   6 +-
 .../src/test/resources/schemaTestTopology.json  |  43 ++
 .../resources/testAppPackage/mydtapp/pom.xml    |   2 +-
 pom.xml                                         |   2 +-
 75 files changed, 1886 insertions(+), 591 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/d7c8964b/CHANGELOG.md
----------------------------------------------------------------------
diff --cc CHANGELOG.md
index 141ff79,0158919..0f06d2d
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@@ -1,11 -1,27 +1,126 @@@
  Apex Changelog
  ========================================================================================================================
  
 +Version 3.0.0
 +------------------------------------------------------------------------------------------------------------------------
 +
 +* Add jersey client jar for app execution
 +* Must catch NoClassDefFoundError when processing operator classes in jar, previously catching Throwable was changed to catching Exception
++* Do not catch throwable in DTCli and exit immediately when worker thread throws an error
++* Depend on published netlet version
++* Catch throwable when executing command because launching an app can throw java.lang.VerifyError: bad type on operand stack
++* Removed runOnlyOnce when generating dt-git.properties and generate even when not using release profile
++* Undeploy heartbeat requests are not processes if container is idle
++* Fix potential NPE
++* Comment hide the actual type for string types (URI, URL, Class etc) and add 2 missing wrapper types
++* Fixed typo in webservice url for get-physical-plan command
++* Resolve deleting checkpoint in different thread
++* Removed duplicate code and added unit test for json stream codec
++* APEX-11 #resolve added checkpoint metric
++* Have default connect and read timeouts because the jersey defaults are infinity and that blocks threads and those threads can't be interrupted either
++* Removed invalid app data push annotation
++* Use FileContext instead of FileSystem to write the meta json file
++* Comment added required memory and required vcores in the appinfo
++* Comment filter abstract types from getAssignableClasses call and rename initializable to instantiable
++* Deploy all artifacts by default.
++* Comment fix the bug in trimming the graph
++* HA support for stram webservice filter.
++* Removed dependencies in filter to hadoop classes with private audience as their interface has changed from Hadoop 2.2 to 2.6
++* Related doc updates
++* Comment Prune the external dependencies from type graph and break the type circle
++* Fixing class not found issue due to missing dt-common jar.
++* Resolve removed old alert support in stram
++* Use tokens from Credentials as UserGroupInformation.getTokens() returns HDFS Namenode hidden tokens that should not be passed to containers.
++* Support for RM delegation token renewal in secure HA environments
++* Resolve fixed bug when custom metric keys are not stored correctly in meta file
++* Comment Use apache xbean shaded asm to resolve jdk 8 class and avoid conflict with old asm version that hadoop depends on
++* APEX-5 #resolve Set APPLICATION_PATH for unit tests that launch an app
++* Resolve Added dependency on dt-engine in test scope of archetype
++* Corrected the place to get pom.properties for version info
++* Token authentication support for buffer server
++* Adding default aggregator for primitive customMetrics
++* Netlet has a few fixes related to the non-firing OP_CONNECT call, so test against those.
++* Print stack trace when exception occurs
++* Commented out app master mb in properties.xml
++* Ability to extract javadocs as xml
++* Moved PAYLOAD_VALUE out of default case. Count number of skipped payload tuples.
++* Resolve fixing ASM signature visitor to add upper bound to Object for unbounded wild card entries
++* Removing unused imports
++* Addressing review comments
++* Reverting code format change
++* Resolve Added END_STREAM to be distributed to physical nodes. Introduced debug message for default case.
++* Comment removed the coding convention
++* Resolve Changed reading of portTypeInfo, port annotations in operator discoverer to read from ASM
++* Added test case for operator discoverer
++* Updated Type Discovery tests to getPortInfo via ASM instead of reflection API
++* Addressing review comments
++* Removed condition for public, final, transient ports when generating typeGraph using ASM
++* Added a class for storing input, output port annotations
++* Added port type info handling for ports other than DefaultInputPort and DefaultOutputPort types
++* Updated a test case for the same
++* Adding annotations to list. Missed in previous commit
++* Added rescan of typegraph to capture operator ports
++* Changing ASMUtil utility methods to public
++* Addressing review comments.
++* Correcting jar entry name
++* Updated a test after merge
++* Removed system.out print from test cases and added debug print on failure
++* Removing temp file
++* Reversed the order of setting jvm_options for app master
++* Resolve Added validation for root operator should be input operator in logical plan validation
++* Fixing formatting and added license header
++* Removed extra line spaces
++* Correcting output port in dt-site.xml stream connection
++* Resolve Added validation for root operator should be input operator in logical plan validation
++* Fixing formatting and added license header
++* Removed extra line spaces
++* Correcting output port in dt-site.xml stream connection
++* Removing white spaces
++* Renaming the properties to reflect the code change
++* Comment add the missing open tag <p>
++* Removed unnecessary dependency
++* Resolve Remove unnecessary unpack in app package archetype
++* Resolve support java.lang.Class and resolve uiType to special types
++* Resolve update dependency to dt-common in archetype
++* Resolve added the container jvm options for app master
++* Resolve attach apa file as an artifact
++* Resolve test app package should depend on dt-common instead of dt-engine
++* Removed additional license header
++* Removed incorrectly left in license headers due to incorrect formatting
++* Put back revision info.
++* Added a TODO note for handling of stram delegation tokens for future
++* Comment Add dag attributes to LogicalPlanSerializer
++* Using resolveType method in OperatorDiscoverer to describe an attribute completely
++* Switch to Java7 and update compiler plugin.
++* Remove invalid module references.
++* Separated out HA token creation from non-HA case as it involves special handling that is subject to change if Hadoop's internals change.
++* Resolve - Add all resource managers to RM delegation token service.
++* Fix CLI script.
++* Stram directory is moved to engine
++* Fix dependency plugin version (2.3 shipped with maven 3.2.5 pulls dependencies from test scope)
++* Setup the pom files for OS with optimized dependencies
++* Changed the license header to Apache 2.0 license.
+ 
+ Version 3.1.0
+ ------------------------------------------------------------------------------------------------------------------------
+ 
+ ** Improvement
+     * [APEX-12] - Fix Base Operator To Not Show Name Property In App Builder
+ 
+ ** Bug
+     * [APEX-35] - Test exceptions due to missing directory in saveMetaInfo
+     * [APEX-36] - FSStorageAgent to account for HDFS lease when writing checkpoint files
+     * [APEX-37] - Container and operator json line file in StreamingContainerManager should not be appended from previous app attempt 
+     * [APEX-43] - SchemaSupport: TUPLE_CLASS attribute should use Class2String StringCodec
+     * [APEX-56] - Controlled plan modification on operator shutdown 
+ 
+ 
+ 
+ Version 3.0.0
+ ------------------------------------------------------------------------------------------------------------------------
+ 
+ * Add jersey client jar for app execution
+ * Must catch NoClassDefFoundError when processing operator classes in jar, previously catching Throwable was changed to catching Exception
  * Do not catch throwable in DTCli and exit immediately when worker thread throws an error
  * Depend on published netlet version
  * Catch throwable when executing command because launching an app can throw java.lang.VerifyError: bad type on operand stack

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/d7c8964b/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/d7c8964b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
----------------------------------------------------------------------
diff --cc engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
index 95f4648,7002c1d..a9502b7
--- a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
+++ b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
@@@ -494,12 -496,10 +496,13 @@@ public class StreamingContainerManager 
      }
  
      IOUtils.closeQuietly(containerFile);
-     for (FSJsonLineFile operatorFile : operatorFiles.values()) {
-       IOUtils.closeQuietly(operatorFile);
+     IOUtils.closeQuietly(operatorFile);
+     if(poolExecutor != null) {
+       poolExecutor.shutdown();
      }
 +    if(poolExecutor != null) {
 +      poolExecutor.shutdown();
 +    }
    }
  
    public void subscribeToEvents(Object listener)
@@@ -2246,11 -2265,11 +2268,15 @@@
      oi.currentWindowId = toWsWindowId(os.currentWindowId.get());
      if (os.lastHeartbeat != null) {
        oi.lastHeartbeat = os.lastHeartbeat.getGeneratedTms();
 +    }    
 +    if (os.checkpointStats != null) {
 +      oi.checkpointTime = os.checkpointStats.checkpointTime;
 +      oi.checkpointStartTime = os.checkpointStats.checkpointStartTime;
      }
+     if (os.checkpointStats != null) {
+       oi.checkpointTime = os.checkpointStats.checkpointTime;
+       oi.checkpointStartTime = os.checkpointStats.checkpointStartTime;
+     }
      oi.checkpointTimeMA = os.checkpointTimeMA.getAvg();
      for (PortStatus ps : os.inputPortStatusList.values()) {
        PortInfo pinfo = new PortInfo();


[28/50] incubator-apex-core git commit: Merge pull request #126 from tweise/fixWarnings

Posted by ch...@apache.org.
Merge pull request #126 from tweise/fixWarnings

Remove warnings.

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/159985ac
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/159985ac
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/159985ac

Branch: refs/heads/master
Commit: 159985ac40e482a54d3d73dacd2833dbdf8f4646
Parents: 8a13585 485c9ac
Author: David Yan <da...@datatorrent.com>
Authored: Tue Aug 11 14:55:44 2015 -0700
Committer: David Yan <da...@datatorrent.com>
Committed: Tue Aug 11 14:55:44 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/plan/logical/LogicalPlan.java     | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
----------------------------------------------------------------------



[19/50] incubator-apex-core git commit: Merge pull request #114 from chandnisingh/SchemaSupport_3.1.0

Posted by ch...@apache.org.
Merge pull request #114 from chandnisingh/SchemaSupport_3.1.0

Fixed the schema class missing error

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/4e492191
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/4e492191
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/4e492191

Branch: refs/heads/master
Commit: 4e492191fd9c8b1b4d0d1599cc067ee8eb67172a
Parents: 8cbecac 430aec9
Author: Thomas Weise <th...@gmail.com>
Authored: Thu Aug 6 22:05:17 2015 -0700
Committer: Thomas Weise <th...@gmail.com>
Committed: Thu Aug 6 22:05:17 2015 -0700

----------------------------------------------------------------------
 .../stram/plan/logical/LogicalPlanConfiguration.java           | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)
----------------------------------------------------------------------



[30/50] incubator-apex-core git commit: Merge pull request #128 from chandnisingh/bugFix-APEX-43

Posted by ch...@apache.org.
Merge pull request #128 from chandnisingh/bugFix-APEX-43

APEX-43: assigning classToStringCodec to TUPLE_CLASS attr in PortContext

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/86c86c6f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/86c86c6f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/86c86c6f

Branch: refs/heads/master
Commit: 86c86c6f3144d7a26fc8745a3feb460279496eeb
Parents: 159985a 93b8c66
Author: Thomas Weise <th...@gmail.com>
Authored: Tue Aug 11 16:31:55 2015 -0700
Committer: Thomas Weise <th...@gmail.com>
Committed: Tue Aug 11 16:31:55 2015 -0700

----------------------------------------------------------------------
 .../main/java/com/datatorrent/api/Context.java  |  2 +-
 .../plan/LogicalPlanConfigurationTest.java      | 32 +++++++++++++++-----
 2 files changed, 26 insertions(+), 8 deletions(-)
----------------------------------------------------------------------



[35/50] incubator-apex-core git commit: Merge pull request #131 from chandnisingh/APEX-36-310

Posted by ch...@apache.org.
Merge pull request #131 from chandnisingh/APEX-36-310

APEX-36: calling tmp file '_tmp' instead of '._COPYING_' which is res…

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/ccf704ed
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/ccf704ed
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/ccf704ed

Branch: refs/heads/master
Commit: ccf704ed5b9b71ddc3d47e6a64cbc509bf72ebc0
Parents: b8c0b4c 7560cef
Author: Thomas Weise <th...@gmail.com>
Authored: Tue Aug 18 18:08:00 2015 -0700
Committer: Thomas Weise <th...@gmail.com>
Committed: Tue Aug 18 18:08:00 2015 -0700

----------------------------------------------------------------------
 .../src/main/java/com/datatorrent/common/util/FSStorageAgent.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------



[23/50] incubator-apex-core git commit: deleting the test folder quietly

Posted by ch...@apache.org.
deleting the test folder quietly


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/d57e0770
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/d57e0770
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/d57e0770

Branch: refs/heads/master
Commit: d57e0770d4202112b4837662c724f04c16cc9a29
Parents: 3f8f97e
Author: Chandni Singh <ch...@datatorrent.com>
Authored: Mon Aug 10 14:19:36 2015 -0700
Committer: Chandni Singh <ch...@datatorrent.com>
Committed: Mon Aug 10 14:26:48 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/engine/AutoMetricTest.java | 10 ----------
 .../com/datatorrent/stram/support/StramTestSupport.java   |  7 +------
 2 files changed, 1 insertion(+), 16 deletions(-)
----------------------------------------------------------------------
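
The diffs below drop the manual FileUtils.deleteDirectory calls from the tests and make the support rule delete its directory with FileUtils.deleteQuietly, which swallows null arguments, missing directories and I/O errors, so cleanup can no longer fail (or mask) a test run. A minimal standalone sketch of the same pattern, assuming only commons-io and JUnit; ScratchDirWatcher and its temp path are illustrative names, not the project's actual rule in StramTestSupport:

    import java.io.File;

    import org.apache.commons.io.FileUtils;
    import org.junit.rules.TestWatcher;
    import org.junit.runner.Description;

    // Hypothetical rule that cleans up a per-test scratch directory after each test method.
    public class ScratchDirWatcher extends TestWatcher
    {
      public final String dir = System.getProperty("java.io.tmpdir") + "/scratch-" + System.nanoTime();

      @Override
      protected void finished(Description description)
      {
        // deleteQuietly never throws; it returns false if the directory could not be removed.
        FileUtils.deleteQuietly(new File(dir));
      }
    }

Used as a JUnit @Rule, the directory is removed after every test regardless of outcome, which is what the change below relies on instead of per-test cleanup code.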


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/d57e0770/engine/src/test/java/com/datatorrent/stram/engine/AutoMetricTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/engine/AutoMetricTest.java b/engine/src/test/java/com/datatorrent/stram/engine/AutoMetricTest.java
index 752adeb..3ca5221 100644
--- a/engine/src/test/java/com/datatorrent/stram/engine/AutoMetricTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/engine/AutoMetricTest.java
@@ -15,7 +15,6 @@
  */
 package com.datatorrent.stram.engine;
 
-import java.io.File;
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.Collection;
@@ -23,7 +22,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CountDownLatch;
 
-import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
 import org.junit.Rule;
@@ -231,8 +229,6 @@ public class AutoMetricTest
   {
     CountDownLatch latch = new CountDownLatch(1);
 
-    FileUtils.deleteDirectory(new File(testMeta.dir)); // clean any state from previous run
-
     LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(new Configuration());
     LogicalPlan dag = new LogicalPlan();
 
@@ -262,8 +258,6 @@ public class AutoMetricTest
   {
     CountDownLatch latch = new CountDownLatch(2);
 
-    FileUtils.deleteDirectory(new File(testMeta.dir)); // clean any state from previous run
-
     LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(new Configuration());
     LogicalPlan dag = new LogicalPlan();
     dag.setAttribute(LogicalPlan.APPLICATION_PATH, testMeta.dir);
@@ -289,8 +283,6 @@ public class AutoMetricTest
   @Test
   public void testInjectionOfDefaultMetricsAggregator() throws Exception
   {
-    FileUtils.deleteDirectory(new File(testMeta.dir)); // clean any state from previous run
-
     LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(new Configuration());
     LogicalPlan dag = new LogicalPlan();
     dag.setAttribute(LogicalPlan.APPLICATION_PATH, testMeta.dir);
@@ -367,8 +359,6 @@ public class AutoMetricTest
   {
     CountDownLatch latch = new CountDownLatch(1);
 
-    FileUtils.deleteDirectory(new File(testMeta.dir)); // clean any state from previous run
-
     LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(new Configuration());
     LogicalPlan dag = new LogicalPlan();
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/d57e0770/engine/src/test/java/com/datatorrent/stram/support/StramTestSupport.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/support/StramTestSupport.java b/engine/src/test/java/com/datatorrent/stram/support/StramTestSupport.java
index 167cfd8..71a402e 100644
--- a/engine/src/test/java/com/datatorrent/stram/support/StramTestSupport.java
+++ b/engine/src/test/java/com/datatorrent/stram/support/StramTestSupport.java
@@ -265,12 +265,7 @@ abstract public class StramTestSupport
     @Override
     protected void finished(org.junit.runner.Description description)
     {
-      try {
-        FileUtils.deleteDirectory(new File(this.dir));
-      }
-      catch (IOException e) {
-        throw new RuntimeException(e);
-      }
+      FileUtils.deleteQuietly(new File(this.dir));
     }
   }
 


[47/50] incubator-apex-core git commit: Preparing for Release 3.1.0-RC2

Posted by ch...@apache.org.
Preparing for Release 3.1.0-RC2


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/b37262dc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/b37262dc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/b37262dc

Branch: refs/heads/master
Commit: b37262dcc8d5ba623208c934fb67811c161a833c
Parents: 3eb480d
Author: DataTorrent CI <je...@datatorrent.com>
Authored: Mon Aug 31 18:56:50 2015 +0530
Committer: DataTorrent CI <je...@datatorrent.com>
Committed: Mon Aug 31 18:56:50 2015 +0530

----------------------------------------------------------------------
 apex-app-archetype/README.md                                     | 2 +-
 apex-app-archetype/pom.xml                                       | 2 +-
 .../src/test/resources/projects/basic/archetype.properties       | 2 +-
 apex-conf-archetype/README.md                                    | 2 +-
 apex-conf-archetype/pom.xml                                      | 2 +-
 .../src/test/resources/projects/basic/archetype.properties       | 2 +-
 api/pom.xml                                                      | 4 ++--
 bufferserver/pom.xml                                             | 4 ++--
 common/pom.xml                                                   | 4 ++--
 engine/pom.xml                                                   | 2 +-
 .../test/java/com/datatorrent/stram/client/AppPackageTest.java   | 2 +-
 engine/src/test/resources/testAppPackage/mydtapp/pom.xml         | 2 +-
 pom.xml                                                          | 2 +-
 13 files changed, 16 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/apex-app-archetype/README.md
----------------------------------------------------------------------
diff --git a/apex-app-archetype/README.md b/apex-app-archetype/README.md
index 7edb310..7d79ddb 100644
--- a/apex-app-archetype/README.md
+++ b/apex-app-archetype/README.md
@@ -6,7 +6,7 @@ How to Generate an Apex Application Project Template
 
 Run the following command
 
-    mvn archetype:generate -DarchetypeGroupId=com.datatorrent -DarchetypeArtifactId=apex-app-archetype -DarchetypeVersion=3.1.0-RC1 -DgroupId=com.example -Dpackage=com.example.myapexapp -DartifactId=myapexapp -Dversion=1.0-SNAPSHOT
+    mvn archetype:generate -DarchetypeGroupId=com.datatorrent -DarchetypeArtifactId=apex-app-archetype -DarchetypeVersion=3.1.0-RC2 -DgroupId=com.example -Dpackage=com.example.myapexapp -DartifactId=myapexapp -Dversion=1.0-SNAPSHOT
 
 Using your favorite IDE, open the project that has just been created by the above command.
 Write your application code and optionally operator code 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/apex-app-archetype/pom.xml
----------------------------------------------------------------------
diff --git a/apex-app-archetype/pom.xml b/apex-app-archetype/pom.xml
index 514289a..c1f4c6c 100644
--- a/apex-app-archetype/pom.xml
+++ b/apex-app-archetype/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-RC1</version>
+    <version>3.1.0-RC2</version>
   </parent>
 
   <artifactId>apex-app-archetype</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/apex-app-archetype/src/test/resources/projects/basic/archetype.properties
----------------------------------------------------------------------
diff --git a/apex-app-archetype/src/test/resources/projects/basic/archetype.properties b/apex-app-archetype/src/test/resources/projects/basic/archetype.properties
index d34cfeb..839c22c 100644
--- a/apex-app-archetype/src/test/resources/projects/basic/archetype.properties
+++ b/apex-app-archetype/src/test/resources/projects/basic/archetype.properties
@@ -3,4 +3,4 @@ package=it.pkg
 version=0.1-SNAPSHOT
 groupId=archetype.it
 artifactId=basic
-archetypeVersion=3.1.0-RC1
+archetypeVersion=3.1.0-RC2

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/apex-conf-archetype/README.md
----------------------------------------------------------------------
diff --git a/apex-conf-archetype/README.md b/apex-conf-archetype/README.md
index 3993c9f..8bd6b0d 100644
--- a/apex-conf-archetype/README.md
+++ b/apex-conf-archetype/README.md
@@ -6,7 +6,7 @@ How to Generate a Apex App Configuration Project Template
 
 Run the following command
 
-    mvn archetype:generate -DarchetypeGroupId=com.datatorrent -DarchetypeArtifactId=apex-conf-archetype -DarchetypeVersion=3.1.0-RC1 -DgroupId=com.example -Dpackage=com.example.myapexapp -DartifactId=myapexconf -Dversion=1.0-SNAPSHOT
+    mvn archetype:generate -DarchetypeGroupId=com.datatorrent -DarchetypeArtifactId=apex-conf-archetype -DarchetypeVersion=3.1.0-RC2 -DgroupId=com.example -Dpackage=com.example.myapexapp -DartifactId=myapexconf -Dversion=1.0-SNAPSHOT
 
 Using your favorite IDE, open the project that has just been created by the above command.
 Write your application code and optionally operator code 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/apex-conf-archetype/pom.xml
----------------------------------------------------------------------
diff --git a/apex-conf-archetype/pom.xml b/apex-conf-archetype/pom.xml
index 4162e9b..faa70cd 100644
--- a/apex-conf-archetype/pom.xml
+++ b/apex-conf-archetype/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-RC1</version>
+    <version>3.1.0-RC2</version>
   </parent>
 
   <artifactId>apex-conf-archetype</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties
----------------------------------------------------------------------
diff --git a/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties b/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties
index d34cfeb..839c22c 100644
--- a/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties
+++ b/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties
@@ -3,4 +3,4 @@ package=it.pkg
 version=0.1-SNAPSHOT
 groupId=archetype.it
 artifactId=basic
-archetypeVersion=3.1.0-RC1
+archetypeVersion=3.1.0-RC2

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/api/pom.xml
----------------------------------------------------------------------
diff --git a/api/pom.xml b/api/pom.xml
index 30e1e5b..d0cdc1f 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -6,7 +6,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-RC1</version>
+    <version>3.1.0-RC2</version>
   </parent>
 
   <artifactId>dt-api</artifactId>
@@ -38,7 +38,7 @@
             <dependency>
               <groupId>com.datatorrent</groupId>
               <artifactId>dt-api</artifactId>
-              <version>3.1.0-RC1</version>
+              <version>3.1.0-RC2</version>
             </dependency>
           </oldVersion>
           <newVersion>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/bufferserver/pom.xml
----------------------------------------------------------------------
diff --git a/bufferserver/pom.xml b/bufferserver/pom.xml
index 460ed10..e612c0e 100644
--- a/bufferserver/pom.xml
+++ b/bufferserver/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-RC1</version>
+    <version>3.1.0-RC2</version>
   </parent>
 
   <artifactId>dt-bufferserver</artifactId>
@@ -28,7 +28,7 @@
     <dependency>
       <groupId>com.datatorrent</groupId>
       <artifactId>dt-common</artifactId>
-      <version>3.1.0-RC1</version>
+      <version>3.1.0-RC2</version>
       <type>jar</type>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index def2a7d..7209d1d 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-RC1</version>
+    <version>3.1.0-RC2</version>
   </parent>
 
   <artifactId>dt-common</artifactId>
@@ -23,7 +23,7 @@
             <dependency>
               <groupId>com.datatorrent</groupId>
               <artifactId>dt-common</artifactId>
-              <version>3.1.0-RC1</version>
+              <version>3.1.0-RC2</version>
             </dependency>
           </oldVersion>
           <newVersion>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/engine/pom.xml
----------------------------------------------------------------------
diff --git a/engine/pom.xml b/engine/pom.xml
index 792a66d..47cee1a 100644
--- a/engine/pom.xml
+++ b/engine/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-RC1</version>
+    <version>3.1.0-RC2</version>
   </parent>
 
   <artifactId>dt-engine</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java b/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
index 7239f87..36b7e20 100644
--- a/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
@@ -72,7 +72,7 @@ public class AppPackageTest
   {
     Assert.assertEquals("mydtapp", json.getString("appPackageName"));
     Assert.assertEquals("1.0-SNAPSHOT", json.getString("appPackageVersion"));
-    Assert.assertEquals("3.1.0-RC1", json.getString("dtEngineVersion"));
+    Assert.assertEquals("3.1.0-RC2", json.getString("dtEngineVersion"));
     Assert.assertEquals("lib/*.jar", json.getJSONArray("classPath").getString(0));
 
     JSONObject application = json.getJSONArray("applications").getJSONObject(0);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/engine/src/test/resources/testAppPackage/mydtapp/pom.xml
----------------------------------------------------------------------
diff --git a/engine/src/test/resources/testAppPackage/mydtapp/pom.xml b/engine/src/test/resources/testAppPackage/mydtapp/pom.xml
index 48db4ac..c88ed44 100644
--- a/engine/src/test/resources/testAppPackage/mydtapp/pom.xml
+++ b/engine/src/test/resources/testAppPackage/mydtapp/pom.xml
@@ -13,7 +13,7 @@
 
   <properties>
     <!-- change this if you desire to use a different version of DataTorrent -->
-    <datatorrent.version>3.1.0-RC1</datatorrent.version>
+    <datatorrent.version>3.1.0-RC2</datatorrent.version>
     <datatorrent.apppackage.classpath>lib/*.jar</datatorrent.apppackage.classpath>
   </properties>
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b37262dc/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index b77a77b..3cdd106 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
 
   <groupId>com.datatorrent</groupId>
   <artifactId>dt-framework</artifactId>
-  <version>3.1.0-RC1</version>
+  <version>3.1.0-RC2</version>
   <packaging>pom</packaging>
 
   <name>Realtime Stream Processing Framework</name>


[14/50] incubator-apex-core git commit: APEX-35 #resolve Attempt to create directory before opening the meta file to write

Posted by ch...@apache.org.
APEX-35 #resolve Attempt to create directory before opening the meta file to write


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/45c7685a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/45c7685a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/45c7685a

Branch: refs/heads/master
Commit: 45c7685a0ca7a6cebcebaf0f3ab8b89788adc4b2
Parents: 19d6658
Author: David Yan <da...@datatorrent.com>
Authored: Thu Aug 6 13:31:05 2015 -0700
Committer: David Yan <da...@datatorrent.com>
Committed: Thu Aug 6 17:51:52 2015 -0700

----------------------------------------------------------------------
 .../stram/StreamingContainerManager.java        | 48 +++++++++-----------
 1 file changed, 21 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/45c7685a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
index 0847f3c..6840288 100644
--- a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
+++ b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
@@ -19,6 +19,7 @@ import java.io.*;
 import java.lang.management.ManagementFactory;
 import java.lang.reflect.Field;
 import java.net.InetSocketAddress;
+import java.net.URI;
 import java.util.*;
 import java.util.concurrent.*;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -26,6 +27,7 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import javax.annotation.Nullable;
 
+import com.datatorrent.netlet.util.DTThrowable;
 import com.esotericsoftware.kryo.KryoException;
 import com.esotericsoftware.kryo.io.Input;
 import com.esotericsoftware.kryo.io.Output;
@@ -173,6 +175,7 @@ public class StreamingContainerManager implements PlanContext
   private final Cache<Long, Object> commandResponse = CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.MINUTES).build();
   private long lastLatencyWarningTime;
   private transient ExecutorService poolExecutor;
+  private FileContext fileContext;
 
   //logic operator name to a queue of logical metrics. this gets cleared periodically
   private final Map<String, Queue<Pair<Long, Map<String, Object>>>> logicalMetrics = Maps.newConcurrentMap();
@@ -329,22 +332,8 @@ public class StreamingContainerManager implements PlanContext
       this.eventBus = new MBassador<StramEvent>(BusConfiguration.Default(1, 1, 1));
     }
     this.plan = new PhysicalPlan(dag, this);
-    setupWsClient();
-    setupRecording(enableEventRecording);
-    setupStringCodecs();
     this.journal = new Journal(this);
-    try {
-      saveMetaInfo();
-    } catch (IOException ex) {
-      LOG.error("Error saving meta info to DFS", ex);
-    }
-
-    try {
-      this.containerFile = new FSJsonLineFile(new Path(this.vars.appPath + "/containers"), new FsPermission((short)0644));
-      this.containerFile.append(getAppMasterContainerInfo());
-    } catch (IOException ex) {
-      LOG.warn("Caught exception when instantiating for container info file. Ignoring", ex);
-    }
+    init(enableEventRecording);
   }
 
   private StreamingContainerManager(CheckpointState checkpointedState, boolean enableEventRecording)
@@ -354,20 +343,26 @@ public class StreamingContainerManager implements PlanContext
     poolExecutor = Executors.newFixedThreadPool(4);
     this.plan = checkpointedState.physicalPlan;
     this.eventBus = new MBassador<StramEvent>(BusConfiguration.Default(1, 1, 1));
+    this.journal = new Journal(this);
+    init(enableEventRecording);
+  }
+
+  private void init(boolean enableEventRecording)
+  {
     setupWsClient();
     setupRecording(enableEventRecording);
     setupStringCodecs();
-    this.journal = new Journal(this);
+
     try {
+      Path file = new Path(this.vars.appPath);
+      URI uri = file.toUri();
+      Configuration config = new YarnConfiguration();
+      fileContext = uri.getScheme() == null ? FileContext.getFileContext(config) : FileContext.getFileContext(uri, config);
       saveMetaInfo();
-    } catch (IOException ex) {
-      LOG.error("Error saving meta info to DFS", ex);
-    }
-    try {
-      this.containerFile = new FSJsonLineFile(new Path(this.vars.appPath + "/containers"), new FsPermission((short) 0644));
+      this.containerFile = new FSJsonLineFile(new Path(this.vars.appPath + "/containers"), FsPermission.getDefault());
       this.containerFile.append(getAppMasterContainerInfo());
     } catch (IOException ex) {
-      LOG.error("Caught exception when instantiating for container info file", ex);
+      DTThrowable.rethrow(ex);
     }
   }
 
@@ -858,9 +853,8 @@ public class StreamingContainerManager implements PlanContext
    */
   private void saveMetaInfo() throws IOException
   {
-    Path path = new Path(this.vars.appPath, APP_META_FILENAME + "." + System.nanoTime());
-    FileContext fc = FileContext.getFileContext(path.toUri());
-    try (FSDataOutputStream os = fc.create(path, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE))) {
+    Path file = new Path(this.vars.appPath, APP_META_FILENAME + "." + System.nanoTime());
+    try (FSDataOutputStream os = fileContext.create(file, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE), Options.CreateOpts.CreateParent.createParent())) {
       JSONObject top = new JSONObject();
       JSONObject attributes = new JSONObject();
       for (Map.Entry<Attribute<?>, Object> entry : this.plan.getLogicalPlan().getAttributes().entrySet()) {
@@ -877,7 +871,7 @@ public class StreamingContainerManager implements PlanContext
       throw new RuntimeException(ex);
     }
     Path origPath = new Path(this.vars.appPath, APP_META_FILENAME);
-    fc.rename(path, origPath, Options.Rename.OVERWRITE);
+    fileContext.rename(file, origPath, Options.Rename.OVERWRITE);
   }
 
   public Queue<Pair<Long, Map<String, Object>>> getWindowMetrics(String operatorName)
@@ -1416,7 +1410,7 @@ public class StreamingContainerManager implements PlanContext
         try {
           FSJsonLineFile operatorFile = operatorFiles.get(ptOp.getId());
           if (operatorFile == null) {
-            operatorFiles.putIfAbsent(ptOp.getId(), new FSJsonLineFile(new Path(this.vars.appPath + "/operators/" + ptOp.getId()), new FsPermission((short)0644)));
+            operatorFiles.putIfAbsent(ptOp.getId(), new FSJsonLineFile(new Path(this.vars.appPath + "/operators/" + ptOp.getId()), FsPermission.getDefault()));
             operatorFile = operatorFiles.get(ptOp.getId());
           }
           JSONObject operatorInfo = new JSONObject();
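
The essence of the fix is the write-then-rename sequence above: resolve a FileContext for the application path's scheme once, create the meta file under a temporary name with the CreateParent option so a missing directory no longer triggers the APEX-35 exception, and finally rename it over the real name with Options.Rename.OVERWRITE. A self-contained sketch of that pattern using the same Hadoop FileContext calls as the diff; MetaFileWriter, the meta.json name and the plain Configuration are stand-ins for illustration, not the project's actual class, constant or YarnConfiguration:

    import java.io.IOException;
    import java.net.URI;
    import java.util.EnumSet;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CreateFlag;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Options;
    import org.apache.hadoop.fs.Path;

    // Hypothetical helper showing the create-with-parent + atomic-rename pattern.
    public class MetaFileWriter
    {
      public static void writeAtomically(String appPath, String content) throws IOException
      {
        URI uri = new Path(appPath).toUri();
        Configuration conf = new Configuration();
        // Pick the FileContext that matches the path's scheme (local FS vs. HDFS).
        FileContext fc = uri.getScheme() == null ? FileContext.getFileContext(conf) : FileContext.getFileContext(uri, conf);

        Path tmp = new Path(appPath, "meta.json." + System.nanoTime());
        // CreateParent creates the missing parent directory instead of failing.
        try (FSDataOutputStream os = fc.create(tmp, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
            Options.CreateOpts.CreateParent.createParent())) {
          os.writeBytes(content);
        }
        // Replace any previous meta file in one step.
        fc.rename(tmp, new Path(appPath, "meta.json"), Options.Rename.OVERWRITE);
      }
    }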


[39/50] incubator-apex-core git commit: Merge pull request #134 from chandnisingh/v3.1.0

Posted by ch...@apache.org.
Merge pull request #134 from chandnisingh/v3.1.0

SPOI-6002: NPE while finding if a port type has schema classes

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/ffedce9b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/ffedce9b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/ffedce9b

Branch: refs/heads/master
Commit: ffedce9b6752e4c38a2c1ee469f5a85803ab14be
Parents: 3c5b88c e914fc9
Author: Thomas Weise <th...@gmail.com>
Authored: Thu Aug 20 13:51:47 2015 -0700
Committer: Thomas Weise <th...@gmail.com>
Committed: Thu Aug 20 13:51:47 2015 -0700

----------------------------------------------------------------------
 .../stram/webapp/OperatorDiscoverer.java           | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)
----------------------------------------------------------------------



[02/50] incubator-apex-core git commit: APEX-14 #resolve added check to see if address is null

Posted by ch...@apache.org.
APEX-14 #resolve added check to see if address is null


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/0f9fb471
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/0f9fb471
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/0f9fb471

Branch: refs/heads/master
Commit: 0f9fb471ec6a4b4e36bd7c6569c9d61e41423685
Parents: 66a75e0
Author: Gaurav <ga...@datatorrent.com>
Authored: Fri Jul 24 13:06:04 2015 -0700
Committer: thomas <th...@datatorrent.com>
Committed: Tue Aug 4 11:11:13 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/engine/StreamingContainer.java  | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/0f9fb471/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java b/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
index 35861f1..71364a2 100644
--- a/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
+++ b/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
@@ -905,7 +905,8 @@ public class StreamingContainer extends YarnContainerMain
     bssc.put(StreamContext.EVENT_LOOP, eventloop);
     bssc.setBufferServerAddress(InetSocketAddress.createUnresolved(nodi.bufferServerHost, nodi.bufferServerPort));
     bssc.put(StreamContext.BUFFER_SERVER_TOKEN, nodi.bufferServerToken);
-    if (NetUtils.isLocalAddress(bssc.getBufferServerAddress().getAddress())) {
+    InetAddress inetAddress = bssc.getBufferServerAddress().getAddress();
+    if (inetAddress != null && NetUtils.isLocalAddress(inetAddress)) {
       bssc.setBufferServerAddress(new InetSocketAddress(InetAddress.getByName(null), nodi.bufferServerPort));
     }
 
@@ -1096,7 +1097,8 @@ public class StreamingContainer extends YarnContainerMain
 
             StreamContext context = new StreamContext(nidi.declaredStreamId);
             context.setBufferServerAddress(InetSocketAddress.createUnresolved(nidi.bufferServerHost, nidi.bufferServerPort));
-            if (NetUtils.isLocalAddress(context.getBufferServerAddress().getAddress())) {
+            InetAddress inetAddress = context.getBufferServerAddress().getAddress();
+            if (inetAddress != null && NetUtils.isLocalAddress(inetAddress)) {
               context.setBufferServerAddress(new InetSocketAddress(InetAddress.getByName(null), nidi.bufferServerPort));
             }
             context.put(StreamContext.BUFFER_SERVER_TOKEN, nidi.bufferServerToken);
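
For context on why the guard is needed: a socket address built with InetSocketAddress.createUnresolved() returns null from getAddress(), and passing that null to Hadoop's NetUtils.isLocalAddress() throws a NullPointerException. A minimal sketch of the check in isolation; the class and method names are made up for illustration:

    import java.net.InetAddress;
    import java.net.InetSocketAddress;

    import org.apache.hadoop.net.NetUtils;

    // Hypothetical utility: true only when the address is resolved and bound to a local interface.
    public class LocalAddressCheck
    {
      public static boolean isLocalAddressSafely(InetSocketAddress socketAddress)
      {
        InetAddress address = socketAddress.getAddress(); // null while the host name is unresolved
        return address != null && NetUtils.isLocalAddress(address);
      }
    }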


[37/50] incubator-apex-core git commit: APEX-56 SPOI-4380 #resolve Remove terminated operators from plan after window is committed.

Posted by ch...@apache.org.
APEX-56 SPOI-4380 #resolve Remove terminated operators from plan after window is committed.


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/76faf869
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/76faf869
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/76faf869

Branch: refs/heads/master
Commit: 76faf869506d004fc2f7d470f5bb89d681b470df
Parents: 3c5b88c
Author: thomas <th...@datatorrent.com>
Authored: Thu Aug 20 11:06:33 2015 -0700
Committer: thomas <th...@datatorrent.com>
Committed: Thu Aug 20 11:06:33 2015 -0700

----------------------------------------------------------------------
 .../stram/StreamingContainerManager.java        | 46 +++++++----
 .../stram/plan/physical/PhysicalPlan.java       | 32 +++++---
 .../com/datatorrent/stram/MockContainer.java    |  2 +-
 .../com/datatorrent/stram/StreamCodecTest.java  | 35 +--------
 .../stram/StreamingContainerManagerTest.java    | 81 +++++++++++++++++++-
 5 files changed, 134 insertions(+), 62 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/76faf869/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
index 6e0f3f5..eed2948 100644
--- a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
+++ b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
@@ -154,6 +154,7 @@ public class StreamingContainerManager implements PlanContext
   private long lastResourceRequest = 0;
   private final Map<String, StreamingContainerAgent> containers = new ConcurrentHashMap<String, StreamingContainerAgent>();
   private final List<Pair<PTOperator, Long>> purgeCheckpoints = new ArrayList<Pair<PTOperator, Long>>();
+  private final Map<Long, Set<PTOperator>> shutdownOperators = new HashMap<>();
   private CriticalPathInfo criticalPathInfo;
   private final ConcurrentMap<PTOperator, PTOperator> reportStats = Maps.newConcurrentMap();
   private final AtomicBoolean deployChangeInProgress = new AtomicBoolean();
@@ -1003,6 +1004,26 @@ public class StreamingContainerManager implements PlanContext
       }
       reportStats.remove(o);
     }
+    
+    if (!this.shutdownOperators.isEmpty()) {
+      synchronized (this.shutdownOperators) {
+        Iterator<Map.Entry<Long, Set<PTOperator>>> it = shutdownOperators.entrySet().iterator();
+        while (it.hasNext()) {
+          Map.Entry<Long, Set<PTOperator>> windowAndOpers = it.next();
+          if (windowAndOpers.getKey().longValue() > this.committedWindowId) {
+            // wait until window is committed
+            continue;
+          } else {
+            LOG.info("Removing inactive operators at window {} {}", Codec.getStringWindowId(windowAndOpers.getKey()), windowAndOpers.getValue());
+            for (PTOperator oper : windowAndOpers.getValue()) {
+              plan.removeTerminatedPartition(oper);
+            }
+            it.remove();
+          }
+        }
+      }
+    }
+    
     if (!eventQueue.isEmpty()) {
       for (PTOperator oper : plan.getAllOperators().values()) {
         if (oper.getState() != PTOperator.State.ACTIVE) {
@@ -1274,20 +1295,19 @@ public class StreamingContainerManager implements PlanContext
         else {
           switch (ds) {
             case SHUTDOWN:
-              // remove the operator from the plan
-              Runnable r = new Runnable()
-              {
-                @Override
-                public void run()
-                {
-                  if (oper.getInputs().isEmpty()) {
-                    LOG.info("Removing IDLE operator from plan {}", oper);
-                    plan.removeIdlePartition(oper);
-                  }
+              // schedule operator deactivation against the windowId
+              // will be processed once window is committed and all dependent operators completed processing
+              long windowId = oper.stats.currentWindowId.get(); 
+              if (ohb.windowStats != null && !ohb.windowStats.isEmpty()) {
+                windowId = ohb.windowStats.get(ohb.windowStats.size()-1).windowId;
+              }
+              LOG.debug("Operator {} deactivated at window {}", oper, windowId);
+              synchronized (this.shutdownOperators) {
+                Set<PTOperator> deactivatedOpers = this.shutdownOperators.get(windowId);
+                if (deactivatedOpers == null) {
+                  this.shutdownOperators.put(windowId, deactivatedOpers = Sets.newHashSet(oper));
                 }
-
-              };
-              dispatch(r);
+              }
               sca.undeployOpers.add(oper.getId());
               // record operator stop event
               recordEventAsync(new StramEvent.StopOperatorEvent(oper.getName(), oper.getId(), oper.getContainer().getExternalId()));

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/76faf869/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java b/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
index 5b90c04..a57a248 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
@@ -780,7 +780,8 @@ public class PhysicalPlan implements Serializable
     partitioner.partitioned(mainPC.operatorIdToPartition);
   }
 
-  private void updateStreamMappings(PMapping m) {
+  private void updateStreamMappings(PMapping m)
+  {
     for (Map.Entry<OutputPortMeta, StreamMeta> opm : m.logicalOperator.getOutputStreams().entrySet()) {
       StreamMapping ug = m.outputStreams.get(opm.getKey());
       if (ug == null) {
@@ -789,7 +790,6 @@ public class PhysicalPlan implements Serializable
       }
       LOG.debug("update stream mapping for {} {}", opm.getKey().getOperatorMeta(), opm.getKey().getPortName());
       ug.setSources(m.partitions);
-      //ug.redoMapping();
     }
 
     for (Map.Entry<InputPortMeta, StreamMeta> ipm : m.logicalOperator.getInputStreams().entrySet()) {
@@ -847,7 +847,6 @@ public class PhysicalPlan implements Serializable
         }
         LOG.debug("update upstream stream mapping for {} {}", sourceMapping.logicalOperator, ipm.getValue().getSource().getPortName());
         ug.setSources(sourceMapping.partitions);
-        //ug.redoMapping();
       }
     }
 
@@ -990,18 +989,30 @@ public class PhysicalPlan implements Serializable
   }
 
   /**
-   * Remove a partition that was reported as idle by the execution layer.
-   * Since the end stream tuple is propagated to the downstream operators,
-   * there is no need to undeploy/redeploy them as part of this operation.
+   * Remove a partition that was reported as terminated by the execution layer.
+   * Recursively removes all downstream operators with no remaining input.
    * @param p
    */
-  public void removeIdlePartition(PTOperator p)
+  public void removeTerminatedPartition(PTOperator p)
   {
+    // keep track of downstream operators for cascading remove
+    Set<PTOperator> downstreamOpers = new HashSet<>(p.outputs.size());
+    for (PTOutput out : p.outputs) {
+      for (PTInput sinkIn : out.sinks) {
+        downstreamOpers.add(sinkIn.target);
+      }
+    }
     PMapping currentMapping = this.logicalToPTOperator.get(p.operatorMeta);
     List<PTOperator> copyPartitions = Lists.newArrayList(currentMapping.partitions);
     copyPartitions.remove(p);
     removePartition(p, currentMapping);
     currentMapping.partitions = copyPartitions;
+    // remove orphaned downstream operators
+    for (PTOperator dop : downstreamOpers) {
+      if (dop.inputs.isEmpty()) {
+        removeTerminatedPartition(dop);
+      }
+    }
     deployChanges();
   }
 
@@ -1012,8 +1023,8 @@ public class PhysicalPlan implements Serializable
    * @param oper
    * @return
    */
-  private void removePartition(PTOperator oper, PMapping operatorMapping) {
-
+  private void removePartition(PTOperator oper, PMapping operatorMapping)
+  {
     // remove any parallel partition
     for (PTOutput out : oper.outputs) {
       // copy list as it is modified by recursive remove
@@ -1137,7 +1148,8 @@ public class PhysicalPlan implements Serializable
     return inputPortList;
   }
 
-  void removePTOperator(PTOperator oper) {
+  void removePTOperator(PTOperator oper)
+  {
     LOG.debug("Removing operator " + oper);
 
     // per partition merge operators

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/76faf869/engine/src/test/java/com/datatorrent/stram/MockContainer.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/MockContainer.java b/engine/src/test/java/com/datatorrent/stram/MockContainer.java
index 7a6ba64..c0b704f 100644
--- a/engine/src/test/java/com/datatorrent/stram/MockContainer.java
+++ b/engine/src/test/java/com/datatorrent/stram/MockContainer.java
@@ -91,7 +91,7 @@ public class MockContainer
     for (Map.Entry<Integer, MockOperatorStats> oe : this.stats.entrySet()) {
       OperatorHeartbeat ohb = new OperatorHeartbeat();
       ohb.setNodeId(oe.getKey());
-      ohb.setState(OperatorHeartbeat.DeployState.ACTIVE);
+      ohb.setState(oe.getValue().deployState);
       OperatorStats lstats = new OperatorStats();
       lstats.checkpoint = new Checkpoint(oe.getValue().checkpointWindowId, 0, 0);
       lstats.windowId = oe.getValue().currentWindowId;

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/76faf869/engine/src/test/java/com/datatorrent/stram/StreamCodecTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/StreamCodecTest.java b/engine/src/test/java/com/datatorrent/stram/StreamCodecTest.java
index 9726e65..d7a7fff 100644
--- a/engine/src/test/java/com/datatorrent/stram/StreamCodecTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/StreamCodecTest.java
@@ -1178,28 +1178,6 @@ public class StreamCodecTest
     return unifiers;
   }
 
-  private void checkNotSetStreamCodecInfo(Map<Integer, StreamCodec<?>> streamCodecs, String id,
-                                          Integer streamCodecIdentifier)
-  {
-    StreamCodec<?> streamCodecInfo = streamCodecs.get(streamCodecIdentifier);
-    Assert.assertNotNull("stream codec null " + id, streamCodecInfo);
-    Assert.assertNull("stream codec object not null " + id, streamCodecInfo);
-  }
-
-  private void checkStreamCodecInfo(Map<Integer, StreamCodec<?>> streamCodecs, String id,
-                                    Integer streamCodecIdentifier, StreamCodec<?> streamCodec)
-  {
-    checkStreamCodecInfo(streamCodecs, id, streamCodecIdentifier, streamCodec, null);
-  }
-
-  private void checkStreamCodecInfo(Map<Integer, StreamCodec<?>> streamCodecs, String id,
-                                    Integer streamCodecIdentifier, StreamCodec<?> streamCodec, String className)
-  {
-    StreamCodec<?> streamCodecInfo = streamCodecs.get(streamCodecIdentifier);
-    Assert.assertNotNull("stream codec info null " + id, streamCodecInfo);
-    Assert.assertEquals("stream codec object " + id, streamCodec, streamCodecInfo);
-  }
-
   private void checkPresentStreamCodec(LogicalPlan.OperatorMeta operatorMeta, Operator.InputPort<?> inputPort,
                                        Map<Integer, StreamCodec<?>> streamCodecs,
                                        String id, PhysicalPlan plan )
@@ -1277,17 +1255,6 @@ public class StreamCodecTest
     return otdi;
   }
 
-  private LogicalPlan.InputPortMeta getInputPortMeta(LogicalPlan.StreamMeta streamMeta, LogicalPlan.OperatorMeta operatorMeta)
-  {
-    LogicalPlan.InputPortMeta portMeta = null;
-    for (Map.Entry<LogicalPlan.InputPortMeta, LogicalPlan.StreamMeta> entry : operatorMeta.getInputStreams().entrySet()) {
-      if (entry.getValue() == streamMeta) {
-        portMeta = entry.getKey();
-      }
-    }
-    return portMeta;
-  }
-
   // For tests so that it doesn't trigger assignment of a new id
   public boolean isStrCodecPresent(StreamCodec<?> streamCodecInfo, PhysicalPlan plan)
   {
@@ -1316,7 +1283,7 @@ public class StreamCodecTest
 
   public static class DefaultTestStreamCodec  extends DefaultStatefulStreamCodec<Object> implements Serializable
   {
-
+    private static final long serialVersionUID = 1L;
   }
 
   public static class DefaultCodecOperator extends GenericTestOperator

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/76faf869/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java b/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
index a238e3e..89f2878 100644
--- a/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
@@ -31,7 +31,6 @@ import org.junit.Test;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
-
 import com.datatorrent.api.Context;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.Context.PortContext;
@@ -40,10 +39,10 @@ import com.datatorrent.api.Stats.OperatorStats;
 import com.datatorrent.api.Stats.OperatorStats.PortStats;
 import com.datatorrent.api.StatsListener;
 import com.datatorrent.api.annotation.Stateless;
-
 import com.datatorrent.common.partitioner.StatelessPartitioner;
 import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.FSStorageAgent;
+import com.datatorrent.stram.MockContainer.MockOperatorStats;
 import com.datatorrent.stram.StreamingContainerAgent.ContainerStartRequest;
 import com.datatorrent.stram.StreamingContainerManager.ContainerResource;
 import com.datatorrent.stram.api.AppDataSource;
@@ -56,6 +55,7 @@ import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.ContainerHe
 import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.ContainerHeartbeatResponse;
 import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.ContainerStats;
 import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.OperatorHeartbeat;
+import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.OperatorHeartbeat.DeployState;
 import com.datatorrent.stram.appdata.AppDataPushAgent;
 import com.datatorrent.stram.codec.DefaultStatefulStreamCodec;
 import com.datatorrent.stram.engine.*;
@@ -72,12 +72,14 @@ import com.datatorrent.stram.support.StramTestSupport.EmbeddedWebSocketServer;
 import com.datatorrent.stram.support.StramTestSupport.MemoryStorageAgent;
 import com.datatorrent.stram.support.StramTestSupport.TestMeta;
 import com.datatorrent.stram.tuple.Tuple;
+
 import org.apache.commons.lang.StringUtils;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import org.eclipse.jetty.websocket.WebSocket;
 
-public class StreamingContainerManagerTest {
+public class StreamingContainerManagerTest
+{
   @Rule public TestMeta testMeta = new TestMeta();
 
   @Test
@@ -703,6 +705,74 @@ public class StreamingContainerManagerTest {
     Assert.assertEquals("type " + o1DeployInfo, OperatorDeployInfo.OperatorType.INPUT, o1DeployInfo.type);
   }
 
+  @Test
+  public void testOperatorShutdown()
+  {
+    LogicalPlan dag = new LogicalPlan();
+    dag.setAttribute(com.datatorrent.api.Context.DAGContext.APPLICATION_PATH, testMeta.dir);
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, new MemoryStorageAgent());
+
+    GenericTestOperator o1 = dag.addOperator("o1", GenericTestOperator.class);
+    GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
+    dag.addStream("stream1", o1.outport1, o2.inport1);
+
+    StreamingContainerManager scm = new StreamingContainerManager(dag);
+
+    PhysicalPlan physicalPlan = scm.getPhysicalPlan();
+    Map<PTContainer, MockContainer> mockContainers = new HashMap<>();
+    for (PTContainer c : physicalPlan.getContainers()) {
+      MockContainer mc = new MockContainer(scm, c);
+      mockContainers.put(c, mc);
+    }
+    // deploy all containers
+    for (Map.Entry<PTContainer, MockContainer> ce : mockContainers.entrySet()) {
+      ce.getValue().deploy();
+      // skip buffer server purge in monitorHeartbeat
+      ce.getKey().bufferServerAddress = null;
+    }
+
+    PTOperator o1p1 = physicalPlan.getOperators(dag.getMeta(o1)).get(0);
+    MockContainer mc1 = mockContainers.get(o1p1.getContainer());
+    MockOperatorStats o1p1mos = mc1.stats(o1p1.getId());
+    o1p1mos.currentWindowId(1).checkpointWindowId(1).deployState(DeployState.ACTIVE);
+    mc1.sendHeartbeat();
+
+    PTOperator o2p1 = physicalPlan.getOperators(dag.getMeta(o2)).get(0);
+    MockContainer mc2 = mockContainers.get(o2p1.getContainer());
+    MockOperatorStats o2p1mos = mc2.stats(o2p1.getId());
+    o2p1mos.currentWindowId(1).checkpointWindowId(1).deployState(DeployState.ACTIVE);
+    mc2.sendHeartbeat();
+
+    o1p1mos.currentWindowId(2).deployState(DeployState.SHUTDOWN);
+    mc1.sendHeartbeat();
+    scm.monitorHeartbeat();
+    Assert.assertEquals("committedWindowId", -1, scm.getCommittedWindowId());
+    scm.monitorHeartbeat(); // committedWindowId updated in next cycle
+    Assert.assertEquals("committedWindowId", 1, scm.getCommittedWindowId());
+    scm.processEvents();
+    Assert.assertEquals("containers at committedWindowId=1", 2, physicalPlan.getContainers().size());
+
+    // checkpoint window 2
+    o1p1mos.checkpointWindowId(2);
+    mc1.sendHeartbeat();
+    scm.monitorHeartbeat();
+
+    o2p1mos.currentWindowId(2).checkpointWindowId(2);
+    mc2.sendHeartbeat();
+    scm.monitorHeartbeat();
+    Assert.assertEquals("committedWindowId", 1, scm.getCommittedWindowId());
+    scm.monitorHeartbeat(); // committedWindowId updated in next cycle
+    Assert.assertEquals("committedWindowId", 2, scm.getCommittedWindowId());
+    Assert.assertEquals(1, o1p1.getContainer().getOperators().size());
+    Assert.assertEquals(1, o2p1.getContainer().getOperators().size());
+    Assert.assertEquals(2, physicalPlan.getContainers().size());
+
+    // call again as events are processed after committed window was updated
+    scm.processEvents();
+    Assert.assertEquals(0, o1p1.getContainer().getOperators().size());
+    Assert.assertEquals(0, o2p1.getContainer().getOperators().size());
+    Assert.assertEquals(0, physicalPlan.getContainers().size());
+  }
 
   private void testDownStreamPartition(Locality locality) throws Exception
   {
@@ -738,7 +808,8 @@ public class StreamingContainerManagerTest {
   }
 
   @Test
-  public void testPhysicalPropertyUpdate() throws Exception{
+  public void testPhysicalPropertyUpdate() throws Exception
+  {
     LogicalPlan dag = new LogicalPlan();
     dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null));
     TestGeneratorInputOperator o1 = dag.addOperator("o1", TestGeneratorInputOperator.class);
@@ -755,6 +826,7 @@ public class StreamingContainerManagerTest {
     Future<?> future = dnmgr.getPhysicalOperatorProperty(lc.getPlanOperators(dag.getMeta(o1)).get(0).getId(), "maxTuples", 10000);
     Object object = future.get(10000, TimeUnit.MILLISECONDS);
     Assert.assertNotNull(object);
+    @SuppressWarnings("unchecked")
     Map<String, Object> propertyValue = (Map<String, Object>)object;
     Assert.assertEquals(2,propertyValue.get("maxTuples"));
     lc.shutdown();
@@ -873,6 +945,7 @@ public class StreamingContainerManagerTest {
       pushAgent.pushData();
       Thread.sleep(1000);
       Assert.assertTrue(messages.size() > 0);
+      pushAgent.close();
       JSONObject message = messages.get(0);
       System.out.println("Got this message: " + message.toString(2));
       Assert.assertEquals(topic, message.getString("topic"));


[41/50] incubator-apex-core git commit: Fix for review comment.

Posted by ch...@apache.org.
Fix for review comment.


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/064edf08
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/064edf08
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/064edf08

Branch: refs/heads/master
Commit: 064edf08447d88ad9147bfab460705dfbaf4b4f3
Parents: d19fa66
Author: thomas <th...@datatorrent.com>
Authored: Fri Aug 21 12:43:33 2015 -0700
Committer: thomas <th...@datatorrent.com>
Committed: Fri Aug 21 12:43:33 2015 -0700

----------------------------------------------------------------------
 .../com/datatorrent/stram/StreamingContainerManager.java | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/064edf08/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
index eed2948..7002c1d 100644
--- a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
+++ b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
@@ -1004,7 +1004,7 @@ public class StreamingContainerManager implements PlanContext
       }
       reportStats.remove(o);
     }
-    
+
     if (!this.shutdownOperators.isEmpty()) {
       synchronized (this.shutdownOperators) {
         Iterator<Map.Entry<Long, Set<PTOperator>>> it = shutdownOperators.entrySet().iterator();
@@ -1023,7 +1023,7 @@ public class StreamingContainerManager implements PlanContext
         }
       }
     }
-    
+
     if (!eventQueue.isEmpty()) {
       for (PTOperator oper : plan.getAllOperators().values()) {
         if (oper.getState() != PTOperator.State.ACTIVE) {
@@ -1297,7 +1297,7 @@ public class StreamingContainerManager implements PlanContext
             case SHUTDOWN:
               // schedule operator deactivation against the windowId
               // will be processed once window is committed and all dependent operators completed processing
-              long windowId = oper.stats.currentWindowId.get(); 
+              long windowId = oper.stats.currentWindowId.get();
               if (ohb.windowStats != null && !ohb.windowStats.isEmpty()) {
                 windowId = ohb.windowStats.get(ohb.windowStats.size()-1).windowId;
               }
@@ -1305,8 +1305,9 @@ public class StreamingContainerManager implements PlanContext
               synchronized (this.shutdownOperators) {
                 Set<PTOperator> deactivatedOpers = this.shutdownOperators.get(windowId);
                 if (deactivatedOpers == null) {
-                  this.shutdownOperators.put(windowId, deactivatedOpers = Sets.newHashSet(oper));
+                  this.shutdownOperators.put(windowId, deactivatedOpers = new HashSet<>());
                 }
+                deactivatedOpers.add(oper);
               }
               sca.undeployOpers.add(oper.getId());
               // record operator stop event
@@ -2264,7 +2265,7 @@ public class StreamingContainerManager implements PlanContext
     oi.currentWindowId = toWsWindowId(os.currentWindowId.get());
     if (os.lastHeartbeat != null) {
       oi.lastHeartbeat = os.lastHeartbeat.getGeneratedTms();
-    }    
+    }
     if (os.checkpointStats != null) {
       oi.checkpointTime = os.checkpointStats.checkpointTime;
       oi.checkpointStartTime = os.checkpointStats.checkpointStartTime;


[46/50] incubator-apex-core git commit: Version Changes for v3.1.0-RC1

Posted by ch...@apache.org.
Version Changes for v3.1.0-RC1


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/3eb480d6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/3eb480d6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/3eb480d6

Branch: refs/heads/master
Commit: 3eb480d664e0b1362068d6c56d537bb3ed0183a7
Parents: b2fb001
Author: DataTorrent CI <je...@datatorrent.com>
Authored: Tue Aug 25 11:49:16 2015 +0530
Committer: DataTorrent CI <je...@datatorrent.com>
Committed: Tue Aug 25 11:49:16 2015 +0530

----------------------------------------------------------------------
 apex-app-archetype/README.md                                     | 2 +-
 apex-app-archetype/pom.xml                                       | 2 +-
 .../src/test/resources/projects/basic/archetype.properties       | 2 +-
 apex-conf-archetype/README.md                                    | 2 +-
 apex-conf-archetype/pom.xml                                      | 2 +-
 .../src/test/resources/projects/basic/archetype.properties       | 2 +-
 api/pom.xml                                                      | 4 ++--
 bufferserver/pom.xml                                             | 4 ++--
 common/pom.xml                                                   | 4 ++--
 engine/pom.xml                                                   | 2 +-
 .../test/java/com/datatorrent/stram/client/AppPackageTest.java   | 2 +-
 engine/src/test/resources/testAppPackage/mydtapp/pom.xml         | 2 +-
 pom.xml                                                          | 2 +-
 13 files changed, 16 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/apex-app-archetype/README.md
----------------------------------------------------------------------
diff --git a/apex-app-archetype/README.md b/apex-app-archetype/README.md
index 775f026..7edb310 100644
--- a/apex-app-archetype/README.md
+++ b/apex-app-archetype/README.md
@@ -6,7 +6,7 @@ How to Generate an Apex Application Project Template
 
 Run the following command
 
-    mvn archetype:generate -DarchetypeGroupId=com.datatorrent -DarchetypeArtifactId=apex-app-archetype -DarchetypeVersion=3.0.0 -DgroupId=com.example -Dpackage=com.example.myapexapp -DartifactId=myapexapp -Dversion=1.0-SNAPSHOT
+    mvn archetype:generate -DarchetypeGroupId=com.datatorrent -DarchetypeArtifactId=apex-app-archetype -DarchetypeVersion=3.1.0-RC1 -DgroupId=com.example -Dpackage=com.example.myapexapp -DartifactId=myapexapp -Dversion=1.0-SNAPSHOT
 
 Using your favorite IDE, open the project that has just been created by the above command.
 Write your application code and optionally operator code 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/apex-app-archetype/pom.xml
----------------------------------------------------------------------
diff --git a/apex-app-archetype/pom.xml b/apex-app-archetype/pom.xml
index 7d931e3..514289a 100644
--- a/apex-app-archetype/pom.xml
+++ b/apex-app-archetype/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-SNAPSHOT</version>
+    <version>3.1.0-RC1</version>
   </parent>
 
   <artifactId>apex-app-archetype</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/apex-app-archetype/src/test/resources/projects/basic/archetype.properties
----------------------------------------------------------------------
diff --git a/apex-app-archetype/src/test/resources/projects/basic/archetype.properties b/apex-app-archetype/src/test/resources/projects/basic/archetype.properties
index a5faf73..d34cfeb 100644
--- a/apex-app-archetype/src/test/resources/projects/basic/archetype.properties
+++ b/apex-app-archetype/src/test/resources/projects/basic/archetype.properties
@@ -3,4 +3,4 @@ package=it.pkg
 version=0.1-SNAPSHOT
 groupId=archetype.it
 artifactId=basic
-archetypeVersion=3.0.0
+archetypeVersion=3.1.0-RC1

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/apex-conf-archetype/README.md
----------------------------------------------------------------------
diff --git a/apex-conf-archetype/README.md b/apex-conf-archetype/README.md
index d54876c..3993c9f 100644
--- a/apex-conf-archetype/README.md
+++ b/apex-conf-archetype/README.md
@@ -6,7 +6,7 @@ How to Generate a Apex App Configuration Project Template
 
 Run the following command
 
-    mvn archetype:generate -DarchetypeGroupId=com.datatorrent -DarchetypeArtifactId=apex-conf-archetype -DarchetypeVersion=3.0.0 -DgroupId=com.example -Dpackage=com.example.myapexapp -DartifactId=myapexconf -Dversion=1.0-SNAPSHOT
+    mvn archetype:generate -DarchetypeGroupId=com.datatorrent -DarchetypeArtifactId=apex-conf-archetype -DarchetypeVersion=3.1.0-RC1 -DgroupId=com.example -Dpackage=com.example.myapexapp -DartifactId=myapexconf -Dversion=1.0-SNAPSHOT
 
 Using your favorite IDE, open the project that has just been created by the above command.
 Write your application code and optionally operator code 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/apex-conf-archetype/pom.xml
----------------------------------------------------------------------
diff --git a/apex-conf-archetype/pom.xml b/apex-conf-archetype/pom.xml
index 1f510a2..4162e9b 100644
--- a/apex-conf-archetype/pom.xml
+++ b/apex-conf-archetype/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-SNAPSHOT</version>
+    <version>3.1.0-RC1</version>
   </parent>
 
   <artifactId>apex-conf-archetype</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties
----------------------------------------------------------------------
diff --git a/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties b/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties
index a5faf73..d34cfeb 100644
--- a/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties
+++ b/apex-conf-archetype/src/test/resources/projects/basic/archetype.properties
@@ -3,4 +3,4 @@ package=it.pkg
 version=0.1-SNAPSHOT
 groupId=archetype.it
 artifactId=basic
-archetypeVersion=3.0.0
+archetypeVersion=3.1.0-RC1

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/api/pom.xml
----------------------------------------------------------------------
diff --git a/api/pom.xml b/api/pom.xml
index 12cdd51..30e1e5b 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -6,7 +6,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-SNAPSHOT</version>
+    <version>3.1.0-RC1</version>
   </parent>
 
   <artifactId>dt-api</artifactId>
@@ -38,7 +38,7 @@
             <dependency>
               <groupId>com.datatorrent</groupId>
               <artifactId>dt-api</artifactId>
-              <version>3.0.0</version>
+              <version>3.1.0-RC1</version>
             </dependency>
           </oldVersion>
           <newVersion>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/bufferserver/pom.xml
----------------------------------------------------------------------
diff --git a/bufferserver/pom.xml b/bufferserver/pom.xml
index 02ea4a4..460ed10 100644
--- a/bufferserver/pom.xml
+++ b/bufferserver/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-SNAPSHOT</version>
+    <version>3.1.0-RC1</version>
   </parent>
 
   <artifactId>dt-bufferserver</artifactId>
@@ -28,7 +28,7 @@
     <dependency>
       <groupId>com.datatorrent</groupId>
       <artifactId>dt-common</artifactId>
-      <version>3.1.0-SNAPSHOT</version>
+      <version>3.1.0-RC1</version>
       <type>jar</type>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index 1a9acfd..def2a7d 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-SNAPSHOT</version>
+    <version>3.1.0-RC1</version>
   </parent>
 
   <artifactId>dt-common</artifactId>
@@ -23,7 +23,7 @@
             <dependency>
               <groupId>com.datatorrent</groupId>
               <artifactId>dt-common</artifactId>
-              <version>3.0.0</version>
+              <version>3.1.0-RC1</version>
             </dependency>
           </oldVersion>
           <newVersion>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/engine/pom.xml
----------------------------------------------------------------------
diff --git a/engine/pom.xml b/engine/pom.xml
index c91265c..792a66d 100644
--- a/engine/pom.xml
+++ b/engine/pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>com.datatorrent</groupId>
     <artifactId>dt-framework</artifactId>
-    <version>3.1.0-SNAPSHOT</version>
+    <version>3.1.0-RC1</version>
   </parent>
 
   <artifactId>dt-engine</artifactId>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java b/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
index bf41270..7239f87 100644
--- a/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/client/AppPackageTest.java
@@ -72,7 +72,7 @@ public class AppPackageTest
   {
     Assert.assertEquals("mydtapp", json.getString("appPackageName"));
     Assert.assertEquals("1.0-SNAPSHOT", json.getString("appPackageVersion"));
-    Assert.assertEquals("3.0.0", json.getString("dtEngineVersion"));
+    Assert.assertEquals("3.1.0-RC1", json.getString("dtEngineVersion"));
     Assert.assertEquals("lib/*.jar", json.getJSONArray("classPath").getString(0));
 
     JSONObject application = json.getJSONArray("applications").getJSONObject(0);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/engine/src/test/resources/testAppPackage/mydtapp/pom.xml
----------------------------------------------------------------------
diff --git a/engine/src/test/resources/testAppPackage/mydtapp/pom.xml b/engine/src/test/resources/testAppPackage/mydtapp/pom.xml
index 0c40dda..48db4ac 100644
--- a/engine/src/test/resources/testAppPackage/mydtapp/pom.xml
+++ b/engine/src/test/resources/testAppPackage/mydtapp/pom.xml
@@ -13,7 +13,7 @@
 
   <properties>
     <!-- change this if you desire to use a different version of DataTorrent -->
-    <datatorrent.version>3.0.0</datatorrent.version>
+    <datatorrent.version>3.1.0-RC1</datatorrent.version>
     <datatorrent.apppackage.classpath>lib/*.jar</datatorrent.apppackage.classpath>
   </properties>
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3eb480d6/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 852cc3e..b77a77b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
 
   <groupId>com.datatorrent</groupId>
   <artifactId>dt-framework</artifactId>
-  <version>3.1.0-SNAPSHOT</version>
+  <version>3.1.0-RC1</version>
   <packaging>pom</packaging>
 
   <name>Realtime Stream Processing Framework</name>


[15/50] incubator-apex-core git commit: Merge branch 'APEX-35' of https://github.com/davidyan74/Apex into davidyan74-APEX-35

Posted by ch...@apache.org.
Merge branch 'APEX-35' of https://github.com/davidyan74/Apex into davidyan74-APEX-35


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/d934b978
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/d934b978
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/d934b978

Branch: refs/heads/master
Commit: d934b978c195ba832b5350ad59b6d3ffe7fd810f
Parents: c5d819b 45c7685
Author: thomas <th...@datatorrent.com>
Authored: Thu Aug 6 20:43:08 2015 -0700
Committer: thomas <th...@datatorrent.com>
Committed: Thu Aug 6 20:43:08 2015 -0700

----------------------------------------------------------------------
 .../stram/StreamingContainerManager.java        | 48 +++++++++-----------
 1 file changed, 21 insertions(+), 27 deletions(-)
----------------------------------------------------------------------



[49/50] incubator-apex-core git commit: Preparing for Release 3.1.0

Posted by ch...@apache.org.
Preparing for Release 3.1.0


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/03f612ff
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/03f612ff
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/03f612ff

Branch: refs/heads/master
Commit: 03f612ff858165883daa1630e9af8ed80edc5bfb
Parents: ebc83f8
Author: DataTorrent CI <je...@datatorrent.com>
Authored: Tue Sep 1 05:55:17 2015 -0700
Committer: DataTorrent CI <je...@datatorrent.com>
Committed: Tue Sep 1 05:55:17 2015 -0700

----------------------------------------------------------------------
 CHANGELOG.md | 16 ++++++++++++++++
 RELEASE.md   | 17 +++++++++++++++++
 2 files changed, 33 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/03f612ff/CHANGELOG.md
----------------------------------------------------------------------
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 141ff79..0158919 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,22 @@
 Apex Changelog
 ========================================================================================================================
 
+
+Version 3.1.0
+------------------------------------------------------------------------------------------------------------------------
+
+** Improvement
+    * [APEX-12] - Fix Base Operator To Not Show Name Property In App Builder
+
+** Bug
+    * [APEX-35] - Test exceptions due to missing directory in saveMetaInfo
+    * [APEX-36] - FSStorageAgent to account for HDFS lease when writing checkpoint files
+    * [APEX-37] - Container and operator json line file in StreamingContainerManager should not be appended from previous app attempt 
+    * [APEX-43] - SchemaSupport: TUPLE_CLASS attribute should use Class2String StringCodec
+    * [APEX-56] - Controlled plan modification on operator shutdown 
+
+
+
 Version 3.0.0
 ------------------------------------------------------------------------------------------------------------------------
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/03f612ff/RELEASE.md
----------------------------------------------------------------------
diff --git a/RELEASE.md b/RELEASE.md
index 3ea89d7..196e7e6 100644
--- a/RELEASE.md
+++ b/RELEASE.md
@@ -1,6 +1,23 @@
 DataTorrent RTS Release Notes
 ========================================================================================================================
 
+Version 3.1.0
+------------------------------------------------------------------------------------------------------------------------
+
+### Operator Improvements
+
+* Fix Base Operator To Not Show Name Property In App Builder
+
+### Few Bug Fixes
+
+* Test exceptions due to missing directory in saveMetaInfo
+* FSStorageAgent to account for HDFS lease when writing checkpoint files
+* Container and operator json line file in StreamingContainerManager should not be appended from previous app attempt 
+* SchemaSupport: TUPLE_CLASS attribute should use Class2String StringCodec
+* Controlled plan modification on operator shutdown 
+* Fix Base Operator To Not Show Name Property In App Builder
+
+
 Version 3.0.0
 ------------------------------------------------------------------------------------------------------------------------
 


[09/50] incubator-apex-core git commit: Schema Support

Posted by ch...@apache.org.
Schema Support


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/61929b58
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/61929b58
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/61929b58

Branch: refs/heads/master
Commit: 61929b58f9dbf32c281c845197445e728fffa866
Parents: 66a75e0
Author: Chandni Singh <ch...@datatorrent.com>
Authored: Sun Aug 2 13:29:16 2015 -0700
Committer: Chandni Singh <ch...@datatorrent.com>
Committed: Wed Aug 5 13:13:05 2015 -0700

----------------------------------------------------------------------
 .../main/java/com/datatorrent/api/Context.java  |   7 +
 .../annotation/InputPortFieldAnnotation.java    |  10 +-
 .../annotation/OutputPortFieldAnnotation.java   |  10 +
 .../java/com/datatorrent/stram/cli/DTCli.java   |  15 +-
 .../stram/plan/logical/LogicalPlan.java         |  15 ++
 .../plan/logical/LogicalPlanConfiguration.java  |  26 +-
 .../stram/webapp/OperatorDiscoverer.java        | 235 +++++++++++++------
 .../com/datatorrent/stram/webapp/TypeGraph.java |  28 +++
 .../plan/LogicalPlanConfigurationTest.java      |  65 ++++-
 .../stram/plan/SchemaTestOperator.java          |  33 +++
 .../stram/webapp/OperatorDiscoveryTest.java     |  64 ++++-
 .../src/test/resources/schemaTestTopology.json  |  43 ++++
 12 files changed, 473 insertions(+), 78 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/61929b58/api/src/main/java/com/datatorrent/api/Context.java
----------------------------------------------------------------------
diff --git a/api/src/main/java/com/datatorrent/api/Context.java b/api/src/main/java/com/datatorrent/api/Context.java
index 1417389..249cecd 100644
--- a/api/src/main/java/com/datatorrent/api/Context.java
+++ b/api/src/main/java/com/datatorrent/api/Context.java
@@ -151,6 +151,13 @@ public interface Context
      * a generic codec.
      */
     Attribute<StreamCodec<?>> STREAM_CODEC = new Attribute<StreamCodec<?>>(new Object2String<StreamCodec<?>>());
+
+    /**
+     * Provides the tuple class which the port receives or emits. While this attribute is null by default,
+     * whether it is needed or not is controlled through the port annotation.
+     */
+    Attribute<Class<?>> TUPLE_CLASS = new Attribute<>(new Object2String<Class<?>>());
+
     @SuppressWarnings("FieldNameHidesFieldInSuperclass")
     long serialVersionUID = AttributeMap.AttributeInitializer.initialize(PortContext.class);
   }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/61929b58/api/src/main/java/com/datatorrent/api/annotation/InputPortFieldAnnotation.java
----------------------------------------------------------------------
diff --git a/api/src/main/java/com/datatorrent/api/annotation/InputPortFieldAnnotation.java b/api/src/main/java/com/datatorrent/api/annotation/InputPortFieldAnnotation.java
index 965eab3..2734bf6 100644
--- a/api/src/main/java/com/datatorrent/api/annotation/InputPortFieldAnnotation.java
+++ b/api/src/main/java/com/datatorrent/api/annotation/InputPortFieldAnnotation.java
@@ -16,8 +16,8 @@
 package com.datatorrent.api.annotation;
 
 import java.lang.annotation.*;
+
 /**
- *
  * Annotation for input ports on streaming operators.<p>
  *
  * @since 0.3.2
@@ -33,4 +33,12 @@ public @interface InputPortFieldAnnotation
    * @return - true if port is optional, false otherwise.
    */
   public boolean optional() default false;
+
+  /**
+   * Whether this port needs to know the tuple class. When true, application will have to set
+   * the port attribute- TUPLE_CLASS of the port otherwise dag validation will fail.
+   *
+   * @return true if schema is required; false otherwise.
+   */
+  public boolean schemaRequired() default false;
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/61929b58/api/src/main/java/com/datatorrent/api/annotation/OutputPortFieldAnnotation.java
----------------------------------------------------------------------
diff --git a/api/src/main/java/com/datatorrent/api/annotation/OutputPortFieldAnnotation.java b/api/src/main/java/com/datatorrent/api/annotation/OutputPortFieldAnnotation.java
index 154c1df..bb585c6 100644
--- a/api/src/main/java/com/datatorrent/api/annotation/OutputPortFieldAnnotation.java
+++ b/api/src/main/java/com/datatorrent/api/annotation/OutputPortFieldAnnotation.java
@@ -21,6 +21,7 @@ import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
+import com.datatorrent.api.Context;
 
 /**
  *
@@ -40,4 +41,13 @@ public @interface OutputPortFieldAnnotation {
    * <p>error.</p>
    */
   public boolean error() default false;
+
+  /**
+   * Whether this port needs to know the tuple class. When true, application will have to set
+   * the port attribute- TUPLE_CLASS of the port otherwise dag validation will fail.
+   *
+   * @return  true if schema is required; false otherwise.
+   */
+  public boolean schemaRequired() default false;
 }
+
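
Taken together with the new PortContext.TUPLE_CLASS attribute above, a port annotated with schemaRequired = true passes DAG validation (see the LogicalPlan change below) only when the application or configuration supplies the tuple class for that port. A minimal sketch of doing this programmatically, assuming the SchemaTestOperator added later in this commit and a hypothetical POJO standing in as the tuple class:

    import org.apache.hadoop.conf.Configuration;

    import com.datatorrent.api.Context.PortContext;
    import com.datatorrent.api.DAG;
    import com.datatorrent.api.StreamingApplication;
    import com.datatorrent.stram.plan.SchemaTestOperator;

    // Sketch only: MyPojo stands in for whatever tuple class the port carries.
    public class TupleClassExample implements StreamingApplication
    {
      public static class MyPojo
      {
        private String name;

        public String getName()
        {
          return name;
        }

        public void setName(String name)
        {
          this.name = name;
        }
      }

      @Override
      public void populateDAG(DAG dag, Configuration conf)
      {
        SchemaTestOperator op = dag.addOperator("op", new SchemaTestOperator());
        // without this attribute, validation of the schemaRequired port fails
        dag.setInputPortAttribute(op.schemaRequiredPort, PortContext.TUPLE_CLASS, MyPojo.class);
      }
    }

The equivalent call for an output port is dag.setOutputPortAttribute(...), and the same attribute can be supplied from JSON via the stream "schema" element handled in LogicalPlanConfiguration below.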

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/61929b58/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java b/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
index 936ba25..eff2404 100644
--- a/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
+++ b/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
@@ -3012,6 +3012,8 @@ public class DTCli
         JSONObject portClassHier = new JSONObject();
 
         JSONObject failed = new JSONObject();
+        JSONObject portTypesWithSchemaClasses = new JSONObject();
+
         for (Class<? extends Operator> clazz : operatorClasses) {
           try {
             JSONObject oper = operatorDiscoverer.describeOperator(clazz);
@@ -3021,8 +3023,15 @@ public class DTCli
             String s = defaultValueMapper.writeValueAsString(operIns);
             oper.put("defaultValue", new JSONObject(s).get(clazz.getName()));
             
-            // add class hier info to portClassHier
-            operatorDiscoverer.buildPortClassHier(oper, portClassHier);
+            // add class hierarchy info to portClassHier and fetch port types with schema classes
+            operatorDiscoverer.buildAdditionalPortInfo(oper, portClassHier, portTypesWithSchemaClasses);
+
+            Iterator portTypesIter = portTypesWithSchemaClasses.keys();
+            while (portTypesIter.hasNext()) {
+              if (!portTypesWithSchemaClasses.getBoolean((String) portTypesIter.next())) {
+                portTypesIter.remove();
+              }
+            }
 
             arr.put(oper);
           } catch (Exception | NoClassDefFoundError ex) {
@@ -3031,8 +3040,10 @@ public class DTCli
             failed.put(cls, ex.toString());
           }
         }
+
         json.put("operatorClasses", arr);
         json.put("portClassHier", portClassHier);
+        json.put("portTypesWithSchemaClasses", portTypesWithSchemaClasses);
         if (failed.length() > 0) {
           json.put("failedOperators", failed);
         }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/61929b58/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
index b1e7d94..fc182cd 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
@@ -1170,6 +1170,13 @@ public class LogicalPlan implements Serializable, DAG
               validateThreadLocal(n);
             }
           }
+
+          if (pm.portAnnotation != null && pm.portAnnotation.schemaRequired()) {
+            //since schema is required, the port attribute TUPLE_CLASS should be present
+            if (pm.attributes.get(PortContext.TUPLE_CLASS) == null) {
+              throw new ValidationException("Attribute " + PortContext.TUPLE_CLASS.getName() + " missing on port : " + n.name + "." + pm.getPortName());
+            }
+          }
         }
       }
 
@@ -1179,6 +1186,14 @@ public class LogicalPlan implements Serializable, DAG
           if (pm.portAnnotation != null && !pm.portAnnotation.optional()) {
             throw new ValidationException("Output port connection required: " + n.name + "." + pm.getPortName());
           }
+        } else {
+          //port is connected
+          if (pm.portAnnotation != null && pm.portAnnotation.schemaRequired()) {
+            //since schema is required, the port attribute TUPLE_CLASS should be present
+            if (pm.attributes.get(PortContext.TUPLE_CLASS) == null) {
+              throw new ValidationException("Attribute " + PortContext.TUPLE_CLASS.getName() + " missing on port : " + n.name + "." + pm.getPortName());
+            }
+          }
         }
         allPortsOptional &= (pm.portAnnotation != null && pm.portAnnotation.optional());
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/61929b58/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
index 3e3326b..d838a2d 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlanConfiguration.java
@@ -77,6 +77,7 @@ public class LogicalPlanConfiguration {
   public static final String STREAM_SINKS = "sinks";
   public static final String STREAM_TEMPLATE = "template";
   public static final String STREAM_LOCALITY = "locality";
+  public static final String STREAM_SCHEMA = "schema";
 
   public static final String OPERATOR_PREFIX =  StreamingApplication.DT_PREFIX + "operator.";
   public static final String OPERATOR_CLASSNAME = "classname";
@@ -908,6 +909,11 @@ public class LogicalPlanConfiguration {
       if (locality != null) {
         prop.setProperty(streamPrefix + STREAM_LOCALITY, locality);
       }
+      JSONObject schema = stream.optJSONObject("schema");
+      if (schema != null) {
+        String schemaClass = schema.getString("class");
+        prop.setProperty(streamPrefix + STREAM_SCHEMA, schemaClass);
+      }
     }
     return addFromProperties(prop, conf);
   }
@@ -1126,6 +1132,16 @@ public class LogicalPlanConfiguration {
       DAG.StreamMeta sd = dag.addStream(streamConfEntry.getKey());
       sd.setLocality(streamConf.getLocality());
 
+      String schemaClassName = streamConf.properties.getProperty(STREAM_SCHEMA);
+      Class<?> schemaClass = null;
+      if (schemaClassName != null) {
+        try {
+          schemaClass = Class.forName(schemaClassName);
+        } catch (ClassNotFoundException e) {
+          throw new ValidationException("schema class not found: " + schemaClassName);
+        }
+      }
+
       if (streamConf.sourceNode != null) {
         String portName = null;
         for (Map.Entry<String, StreamConf> e : streamConf.sourceNode.outputs.entrySet()) {
@@ -1137,6 +1153,10 @@ public class LogicalPlanConfiguration {
         Operators.PortMappingDescriptor sourcePortMap = new Operators.PortMappingDescriptor();
         Operators.describe(sourceDecl, sourcePortMap);
         sd.setSource(sourcePortMap.outputPorts.get(portName).component);
+
+        if (schemaClass != null) {
+          dag.setOutputPortAttribute(sourcePortMap.outputPorts.get(portName).component, PortContext.TUPLE_CLASS, schemaClass);
+        }
       }
 
       for (OperatorConf targetNode : streamConf.targetNodes) {
@@ -1150,6 +1170,10 @@ public class LogicalPlanConfiguration {
         Operators.PortMappingDescriptor targetPortMap = new Operators.PortMappingDescriptor();
         Operators.describe(targetDecl, targetPortMap);
         sd.addSink(targetPortMap.inputPorts.get(portName).component);
+
+        if (schemaClass != null) {
+          dag.setInputPortAttribute(targetPortMap.inputPorts.get(portName).component, PortContext.TUPLE_CLASS, schemaClass);
+        }
       }
     }
 
@@ -1164,7 +1188,7 @@ public class LogicalPlanConfiguration {
    */
   public void prepareDAG(LogicalPlan dag, StreamingApplication app, String name)
   {
-    // EVENTUALLY to be replaced by variable enabled configuration in the demo where the attt below is used 
+    // EVENTUALLY to be replaced by variable enabled configuration in the demo where the attribute below is used
     String connectAddress = conf.get(StreamingApplication.DT_PREFIX + Context.DAGContext.GATEWAY_CONNECT_ADDRESS.getName());
     dag.setAttribute(Context.DAGContext.GATEWAY_CONNECT_ADDRESS, connectAddress == null? conf.get(GATEWAY_LISTEN_ADDRESS): connectAddress);
     if (app != null) {

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/61929b58/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java b/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
index 60e35da..004c100 100644
--- a/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
+++ b/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
@@ -22,8 +22,10 @@ import com.datatorrent.stram.webapp.TypeDiscoverer.UI_TYPE;
 import com.datatorrent.stram.webapp.asm.CompactAnnotationNode;
 import com.datatorrent.stram.webapp.asm.CompactFieldNode;
 import com.google.common.base.Predicate;
+import com.google.common.base.Splitter;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
 import java.beans.*;
@@ -44,7 +46,6 @@ import javax.xml.parsers.*;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.text.WordUtils;
 import org.codehaus.jettison.json.*;
-import org.apache.xbean.asm5.tree.AnnotationNode;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.xml.sax.Attributes;
@@ -60,6 +61,8 @@ import org.xml.sax.helpers.DefaultHandler;
  */
 public class OperatorDiscoverer
 {
+  public static final String GENERATED_CLASSES_JAR = "_generated-classes.jar";
+
   private static class ClassComparator implements Comparator<Class<?>> {
 
     @Override
@@ -73,21 +76,34 @@ public class OperatorDiscoverer
   private static final Logger LOG = LoggerFactory.getLogger(OperatorDiscoverer.class);
   private final List<String> pathsToScan = new ArrayList<String>();
   private final ClassLoader classLoader;
-  private final String dtOperatorDoclinkPrefix = "https://www.datatorrent.com/docs/apidocs/index.html";
+  private static final String DT_OPERATOR_DOCLINK_PREFIX = "https://www.datatorrent.com/docs/apidocs/index.html";
   public static final String PORT_TYPE_INFO_KEY = "portTypeInfo";
   private final TypeGraph typeGraph = TypeGraphFactory.createTypeGraphProtoType();
 
+  private static final String USE_SCHEMA_TAG = "@useSchema";
+  private static final String DESCRIPTION_TAG = "@description";
+  private static final Pattern WHITESPACE_PATTERN = Pattern.compile("\\s+?");
+
+  private static final String SCHEMA_REQUIRED_KEY = "schemaRequired";
+
   private final Map<String, OperatorClassInfo> classInfo = new HashMap<String, OperatorClassInfo>();
 
   private static class OperatorClassInfo {
     String comment;
     final Map<String, String> tags = new HashMap<String, String>();
-    final Map<String, String> getMethods = new HashMap<String, String>();
-    final Map<String, String> setMethods = new HashMap<String, String>();
+    final Map<String, MethodInfo> getMethods = Maps.newHashMap();
+    final Map<String, MethodInfo> setMethods = Maps.newHashMap();
     final Set<String> invisibleGetSetMethods = new HashSet<String>();
     final Map<String, String> fields = new HashMap<String, String>();
   }
 
+  private static class MethodInfo
+  {
+    Map<String, String> descriptions = Maps.newHashMap();
+    Map<String, String> useSchemas = Maps.newHashMap();
+    String comment;
+  }
+
   private class JavadocSAXHandler extends DefaultHandler {
 
     private String className = null;
@@ -122,11 +138,19 @@ public class OperatorDiscoverer
       else if (qName.equalsIgnoreCase("tag")) {
         if (oci != null) {
           String tagName = attributes.getValue("name");
-          String tagText = attributes.getValue("text");
+          String tagText = attributes.getValue("text").trim();
           if (methodName != null) {
-            if("@omitFromUI".equals(tagName) && (isGetter(methodName) || isSetter(methodName)))
-            {
-              oci.invisibleGetSetMethods.add(methodName);
+            boolean lGetterCheck = isGetter(methodName);
+            boolean lSetterCheck = !lGetterCheck && isSetter(methodName);
+
+            if (lGetterCheck || lSetterCheck) {
+              if ("@omitFromUI".equals(tagName)) {
+                oci.invisibleGetSetMethods.add(methodName);
+              } else if (DESCRIPTION_TAG.equals(tagName)) {
+                addTagToMethod(lGetterCheck ? oci.getMethods : oci.setMethods, tagText, true);
+              } else if (USE_SCHEMA_TAG.equals(tagName)) {
+                addTagToMethod(lGetterCheck ? oci.getMethods : oci.setMethods, tagText, false);
+              }
             }
 //            if ("@return".equals(tagName) && isGetter(methodName)) {
 //              oci.getMethods.put(methodName, tagText);
@@ -149,6 +173,24 @@ public class OperatorDiscoverer
       }
     }
 
+    private void addTagToMethod(Map<String, MethodInfo> methods, String tagText, boolean isDescription)
+    {
+      MethodInfo mi = methods.get(methodName);
+      if (mi == null) {
+        mi = new MethodInfo();
+        methods.put(methodName, mi);
+      }
+      String[] tagParts = Iterables.toArray(Splitter.on(WHITESPACE_PATTERN).trimResults().omitEmptyStrings().
+        limit(2).split(tagText), String.class);
+      if (tagParts.length == 2) {
+        if (isDescription) {
+          mi.descriptions.put(tagParts[0], tagParts[1]);
+        } else {
+          mi.useSchemas.put(tagParts[0], tagParts[1]);
+        }
+      }
+    }
+
     @Override
     public void endElement(String uri, String localName, String qName) throws SAXException {
       if (qName.equalsIgnoreCase("class")) {
@@ -160,9 +202,19 @@ public class OperatorDiscoverer
         if (methodName != null) {
           // do nothing
           if (isGetter(methodName)) {
-            oci.getMethods.put(methodName, comment.toString());
+            MethodInfo mi = oci.getMethods.get(methodName);
+            if (mi == null) {
+              mi = new MethodInfo();
+              oci.getMethods.put(methodName, mi);
+            }
+            mi.comment = comment.toString();
           } else if (isSetter(methodName)) {
-            oci.setMethods.put(methodName, comment.toString());
+            MethodInfo mi = oci.setMethods.get(methodName);
+            if (mi == null) {
+              mi = new MethodInfo();
+              oci.setMethods.put(methodName, mi);
+            }
+            mi.comment = comment.toString();
           }
         }
         else if (fieldName != null) {
@@ -236,7 +288,7 @@ public class OperatorDiscoverer
   {
     Map<String, JarFile> openJarFiles = new HashMap<String, JarFile>();
     Map<String, File> openClassFiles = new HashMap<String, File>();
-    try { 
+    try {
       for (String path : pathsToScan) {
         File f = null;
         try {
@@ -244,6 +296,9 @@ public class OperatorDiscoverer
           if (!f.exists() || f.isDirectory() || (!f.getName().endsWith("jar") && !f.getName().endsWith("class"))) {
             continue;
           }
+          if (GENERATED_CLASSES_JAR.equals(f.getName())) {
+            continue;
+          }
           if (f.getName().endsWith("class")) {
             typeGraph.addNode(f);
             openClassFiles.put(path, f);
@@ -410,6 +465,9 @@ public class OperatorDiscoverer
           if (!inputPort.has("optional")) {
             inputPort.put("optional", false); // input port that is not annotated is default to be not optional
           }
+          if (!inputPort.has(SCHEMA_REQUIRED_KEY)) {
+            inputPort.put(SCHEMA_REQUIRED_KEY, false);
+          }
           inputPorts.put(inputPort);
         }
 
@@ -422,6 +480,9 @@ public class OperatorDiscoverer
           if (!outputPort.has("error")) {
             outputPort.put("error", false);
           }
+          if (!outputPort.has(SCHEMA_REQUIRED_KEY)) {
+            outputPort.put(SCHEMA_REQUIRED_KEY, false);
+          }
           outputPorts.put(outputPort);
         }
 
@@ -471,7 +532,7 @@ public class OperatorDiscoverer
           }
           else if (clazz.getName().startsWith("com.datatorrent.lib.") ||
                   clazz.getName().startsWith("com.datatorrent.contrib.")) {
-            response.put("doclink", dtOperatorDoclinkPrefix + "?" + getDocName(clazz));
+            response.put("doclink", DT_OPERATOR_DOCLINK_PREFIX + "?" + getDocName(clazz));
           }
         }
       }
@@ -531,10 +592,10 @@ public class OperatorDiscoverer
       if (oci.invisibleGetSetMethods.contains(getPrefix + propName) || oci.invisibleGetSetMethods.contains(setPrefix + propName)) {
         continue;
       }
-      String desc = oci.setMethods.get(setPrefix + propName);
-      desc = desc == null ? oci.getMethods.get(getPrefix + propName) : desc;
-      if (desc != null) {
-        propJ.put("description", desc);
+      MethodInfo methodInfo = oci.setMethods.get(setPrefix + propName);
+      methodInfo = methodInfo == null ? oci.getMethods.get(getPrefix + propName) : methodInfo;
+      if (methodInfo != null) {
+        addTagsToProperties(methodInfo, propJ);
       }
       result.put(propJ);
     }
@@ -553,6 +614,32 @@ public class OperatorDiscoverer
     }
   }
 
+  private void addTagsToProperties(MethodInfo mi, JSONObject propJ) throws JSONException
+  {
+    //create description object. description tag enables the visual tools to display description of keys/values
+    //of a map property, items of a list, properties within a complex type.
+    JSONObject descriptionObj = new JSONObject();
+    if (mi.comment != null) {
+      descriptionObj.put("$", mi.comment);
+    }
+    for (Map.Entry<String, String> descEntry : mi.descriptions.entrySet()) {
+      descriptionObj.put(descEntry.getKey(), descEntry.getValue());
+    }
+    if (descriptionObj.length() > 0) {
+      propJ.put("descriptions", descriptionObj);
+    }
+
+    //create useSchema object. useSchema tag is added to enable visual tools to be able to render a text field
+    //as a dropdown with choices populated from the schema attached to the port.
+    JSONObject useSchemaObj = new JSONObject();
+    for (Map.Entry<String, String> useSchemaEntry : mi.useSchemas.entrySet()) {
+      useSchemaObj.put(useSchemaEntry.getKey(), useSchemaEntry.getValue());
+    }
+    if (useSchemaObj.length() > 0) {
+      propJ.put("useSchema", useSchemaObj);
+    }
+  }
+
   public JSONObject describeClass(String clazzName) throws Exception
   {
     return describeClassByASM(clazzName);
@@ -626,9 +713,9 @@ public class OperatorDiscoverer
               for (Class<?> c = clazz; c != null; c = c.getSuperclass()) {
                 OperatorClassInfo oci = classInfo.get(c.getName());
                 if (oci != null) {
-                  String getMethodDesc = oci.getMethods.get(readMethod.getName());
-                  if (getMethodDesc != null) {
-                    propertyObj.put("description", oci.getMethods.get(readMethod.getName()));
+                  MethodInfo getMethodInfo = oci.getMethods.get(readMethod.getName());
+                  if (getMethodInfo != null) {
+                    addTagsToProperties(getMethodInfo, propertyObj);
                     break;
                   }
                 }
@@ -673,74 +760,79 @@ public class OperatorDiscoverer
 
   /**
    * Enrich portClassHier with class/interface names that map to a list of parent classes/interfaces.
-   * For any class encountered, find its parents too.
+   * For any class encountered, find its parents too.<br/>
+   * Also find the port types which have assignable schema classes.
    *
-   * @param oper Operator to work on
-   * @param portClassHier In-Out param that contains a mapping of class/interface to its parents
+   * @param oper                       Operator to work on
+   * @param portClassHierarchy         In-Out param that contains a mapping of class/interface to its parents
+   * @param portTypesWithSchemaClasses Json that will contain all the ports which have any schema classes.
    */
-  public void buildPortClassHier(JSONObject oper, JSONObject portClassHier) {
+  public void buildAdditionalPortInfo(JSONObject oper, JSONObject portClassHierarchy, JSONObject portTypesWithSchemaClasses)
+  {
     try {
       JSONArray ports = oper.getJSONArray(OperatorDiscoverer.PORT_TYPE_INFO_KEY);
-      int num_ports = ports.length();
-      for (int i = 0; i < num_ports; i++) {
+      for (int i = 0; i < ports.length(); i++) {
         JSONObject port = ports.getJSONObject(i);
 
-        String type;
-        try {
-          type = port.getString("type");
-        } catch (JSONException e) {
-          // no type key
+        String portType = port.optString("type");
+        if (portType == null) {
+          //skipping if port type is null
           continue;
         }
 
-        try {
-          // load the port type class
-          Class<?> portClazz = classLoader.loadClass(type.replaceAll("\\bclass ", "").replaceAll("\\binterface ", ""));
-
-          // iterate up the class hierarchy to populate the portClassHier map
-          while (portClazz != null) {
-            ArrayList<String> parents = new ArrayList<String>();
+        if (typeGraph.size() == 0) {
+          buildTypeGraph();
+        }
 
-            String portClazzName = portClazz.toString();
-            if (portClassHier.has(portClazzName)) {
-              // already present in portClassHier, so we can stop
-              break;
+        try {
+          //building port class hierarchy
+          LinkedList<String> queue = Lists.newLinkedList();
+          queue.add(portType);
+          while (!queue.isEmpty()) {
+            String currentType = queue.remove();
+            if (portClassHierarchy.has(currentType)) {
+              //already present in the json so we skip.
+              continue;
             }
-
-            // interfaces and Object are at the top of the tree, so we can just put them
-            // in portClassHier with empty parents, then move on.
-            if (portClazz.isInterface() || portClazzName.equals("java.lang.Object")) {
-              portClassHier.put(portClazzName, parents);
-              break;
+            List<String> immediateParents = typeGraph.getParents(currentType);
+            if (immediateParents == null) {
+              portClassHierarchy.put(currentType, Lists.<String>newArrayList());
+              continue;
             }
+            portClassHierarchy.put(currentType, immediateParents);
+            queue.addAll(immediateParents);
+          }
+        } catch (JSONException e) {
+          LOG.warn("building port type hierarchy {}", portType, e);
+        }
 
-            // look at superclass first
-            Class<?> superClazz = portClazz.getSuperclass();
-            try {
-              String superClazzName = superClazz.toString();
-              parents.add(superClazzName);
-            } catch (NullPointerException e) {
-              LOG.info("Superclass is null for `{}` ({})", portClazz, superClazz);
-            }
-            // then look at interfaces implemented in this port
-            for (Class<?> intf : portClazz.getInterfaces()) {
-              String intfName = intf.toString();
-              if (!portClassHier.has(intfName)) {
-                // add the interface to portClassHier
-                portClassHier.put(intfName, new ArrayList<String>());
-              }
-              parents.add(intfName);
+        //finding port types with schema classes
+        if (portTypesWithSchemaClasses.has(portType)) {
+          //already present in the json so skipping
+          continue;
+        }
+        if (portType.equals("byte") || portType.equals("short") || portType.equals("char") || portType.equals("int")
+          || portType.equals("long") || portType.equals("float") || portType.equals("double")
+          || portType.equals("java.lang.String") || portType.equals("java.lang.Object")) {
+          //ignoring primitives, strings and object types as this information is needed only for complex types.
+          continue;
+        }
+        if (port.has("typeArgs")) {
+          //ignoring any type with generics
+          continue;
+        }
+        boolean hasSchemaClasses = false;
+        for (String descendant : typeGraph.getInstantiableDescendants(portType)) {
+          try {
+            if (typeGraph.isInstantiableBean(descendant)) {
+              hasSchemaClasses = true;
+              break;
             }
-
-            // now store class=>parents mapping in portClassHier
-            portClassHier.put(portClazzName, parents);
-
-            // walk up the hierarchy for the next iteration
-            portClazz = superClazz;
+          } catch (JSONException ex) {
+            LOG.warn("checking descendant is instantiable {}", descendant);
           }
-        } catch (ClassNotFoundException e) {
-          LOG.info("Could not make class from `{}`", type);
         }
+        portTypesWithSchemaClasses.put(portType, hasSchemaClasses);
       }
     } catch (JSONException e) {
       // should not reach this
@@ -763,5 +855,4 @@ public class OperatorDiscoverer
     return typeGraph;
   }
 
-
 }
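
The new @description and @useSchema javadoc tags parsed above are split into two whitespace-delimited tokens: the first is kept as a key under the property's "descriptions"/"useSchema" JSON objects, the remainder as the value. The sketch below only illustrates that shape on a hypothetical operator property; the key ("keys") and the port reference ("inputPort.fields[].name") are assumptions for illustration, not a documented convention:

    import java.util.Map;

    // Fragment of a hypothetical operator; base class and ports omitted.
    public class ExpressionMappingSketch
    {
      private Map<String, String> expressionMap;

      /**
       * Maps output field names to expressions evaluated against the input tuple.
       *
       * @description keys names of the fields produced on the outgoing tuple
       * @useSchema keys inputPort.fields[].name
       */
      public void setExpressionMap(Map<String, String> expressionMap)
      {
        this.expressionMap = expressionMap;
      }

      public Map<String, String> getExpressionMap()
      {
        return expressionMap;
      }
    }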

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/61929b58/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java b/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
index b06bb76..61dc99d 100644
--- a/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
+++ b/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
@@ -1152,4 +1152,32 @@ public class TypeGraph
     return result;
   }
 
+  /**
+   * A utility method that tells whether a class is considered a bean.<br/>
+   * For simplicity we exclude classes that have any type-args.
+   *
+   * @param className name of the class
+   * @return true if it is a bean false otherwise.
+   */
+  public boolean isInstantiableBean(String className) throws JSONException
+  {
+    JSONObject classDesc = describeClass(className);
+    if (classDesc.has("typeArgs")) {
+      //any type with generics is not considered a bean
+      return false;
+    }
+    JSONArray classProps = classDesc.optJSONArray("properties");
+    if (classProps == null || classProps.length() == 0) {
+      //no properties then cannot be a bean
+      return false;
+    }
+    for (int p = 0; p < classProps.length(); p++) {
+      JSONObject propDesc = classProps.getJSONObject(p);
+      if (propDesc.optBoolean("canGet", false)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
 }
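
Read concretely, isInstantiableBean above rejects any type that reports type arguments or exposes no readable property, and accepts a class with at least one getter. A few hypothetical classes on either side of that line:

    // Illustrative only; the classification follows the checks in isInstantiableBean.
    class Holder<T>        // reports type arguments -> not considered a bean
    {
      T value;
    }

    class NoProperties     // no getters, hence no readable properties -> not a bean
    {
      void run()
      {
      }
    }

    class CountBean        // at least one readable property -> instantiable bean
    {
      private int count;

      public int getCount()
      {
        return count;
      }

      public void setCount(int count)
      {
        this.count = count;
      }
    }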

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/61929b58/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java b/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java
index c46fb5b..af12575 100644
--- a/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java
@@ -21,6 +21,8 @@ import java.io.StringWriter;
 import java.lang.reflect.Field;
 import java.util.*;
 
+import javax.validation.ValidationException;
+
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
@@ -682,6 +684,63 @@ public class LogicalPlanConfigurationTest {
     }
   }
 
+  @Test
+  public void testTupleClassAttr() throws Exception
+  {
+    String resourcePath = "/schemaTestTopology.json";
+    InputStream is = this.getClass().getResourceAsStream(resourcePath);
+    if (is == null) {
+      fail("Could not load " + resourcePath);
+    }
+    StringWriter writer = new StringWriter();
+
+    IOUtils.copy(is, writer);
+    JSONObject json = new JSONObject(writer.toString());
+
+    Configuration conf = new Configuration(false);
+
+    LogicalPlanConfiguration planConf = new LogicalPlanConfiguration(conf);
+    LogicalPlan dag = planConf.createFromJson(json, "testLoadFromJson");
+    dag.validate();
+
+    OperatorMeta operator1 = dag.getOperatorMeta("operator1");
+    assertEquals("operator1.classname", SchemaTestOperator.class, operator1.getOperator().getClass());
+
+    StreamMeta input1 = dag.getStream("inputStream");
+    assertNotNull(input1);
+    for (LogicalPlan.InputPortMeta targetPort : input1.getSinks()) {
+      Assert.assertEquals("tuple class name required", TestSchema.class, targetPort.getAttributes().get(PortContext.TUPLE_CLASS));
+    }
+  }
+
+  @Test
+  public void testTupleClassAttrValidation() throws Exception
+  {
+    String resourcePath = "/schemaTestTopology.json";
+    InputStream is = this.getClass().getResourceAsStream(resourcePath);
+    if (is == null) {
+      fail("Could not load " + resourcePath);
+    }
+    StringWriter writer = new StringWriter();
+
+    IOUtils.copy(is, writer);
+    JSONObject json = new JSONObject(writer.toString());
+
+    //removing schema so that validation fails
+    json.getJSONArray("streams").getJSONObject(0).remove("schema");
+    Configuration conf = new Configuration(false);
+
+    LogicalPlanConfiguration planConf = new LogicalPlanConfiguration(conf);
+    LogicalPlan dag = planConf.createFromJson(json, "testLoadFromJson");
+
+    try {
+      dag.validate();
+      Assert.fail();
+    } catch (ValidationException ve) {
+      //test passes because the expected validation exception was thrown.
+    }
+  }
+
   private static final Logger logger = LoggerFactory.getLogger(LogicalPlanConfigurationTest.class);
 
   public static class TestApplication implements StreamingApplication {
@@ -789,7 +848,11 @@ public class LogicalPlanConfigurationTest {
         return false;
       return true;
     }
-    
+
+  }
+
+  public static class TestSchema
+  {
   }
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/61929b58/engine/src/test/java/com/datatorrent/stram/plan/SchemaTestOperator.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/plan/SchemaTestOperator.java b/engine/src/test/java/com/datatorrent/stram/plan/SchemaTestOperator.java
new file mode 100644
index 0000000..59aaade
--- /dev/null
+++ b/engine/src/test/java/com/datatorrent/stram/plan/SchemaTestOperator.java
@@ -0,0 +1,33 @@
+/**
+ * Copyright (C) 2015 DataTorrent, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *         http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.datatorrent.stram.plan;
+
+import com.datatorrent.api.DefaultInputPort;
+import com.datatorrent.api.annotation.InputPortFieldAnnotation;
+
+import com.datatorrent.stram.engine.GenericTestOperator;
+
+public class SchemaTestOperator extends GenericTestOperator
+{
+  @InputPortFieldAnnotation(schemaRequired = true)
+  final public transient InputPort<Object> schemaRequiredPort = new DefaultInputPort<Object>()
+  {
+    @Override
+    final public void process(Object payload)
+    {
+    }
+  };
+}

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/61929b58/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java b/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
index 8f8b632..ad915c8 100644
--- a/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
@@ -41,6 +41,8 @@ import com.datatorrent.api.DefaultOutputPort;
 import com.datatorrent.api.annotation.InputPortFieldAnnotation;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.api.InputOperator;
+
 import com.datatorrent.stram.plan.logical.LogicalPlan;
 import com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta;
 import com.datatorrent.stram.plan.logical.LogicalPlanConfiguration;
@@ -170,7 +172,7 @@ public class OperatorDiscoveryTest
     OperatorDiscoverer operatorDiscoverer = new OperatorDiscoverer(classFilePath);
     operatorDiscoverer.buildTypeGraph();
 
-    // make sure (de)serialization of type graph works withtout problem
+    // make sure (de)serialization of type graph works without problem
     Kryo kryo = new Kryo();
     TypeGraph.TypeGraphSerializer tgs = new TypeGraph.TypeGraphSerializer();
     kryo.register(TypeGraph.class, tgs);
@@ -1033,4 +1035,64 @@ public class OperatorDiscoveryTest
     
   }
 
+  public static class SchemaRequiredOperator extends BaseOperator implements InputOperator
+  {
+    @OutputPortFieldAnnotation(schemaRequired = true)
+    public final transient DefaultOutputPort<Object> output = new DefaultOutputPort<Object>();
+
+    @OutputPortFieldAnnotation(schemaRequired = false)
+    public final transient DefaultOutputPort<Object> output1 = new DefaultOutputPort<Object>();
+
+    public final transient DefaultOutputPort<Object> output2 = new DefaultOutputPort<Object>();
+
+    @Override
+    public void emitTuples()
+    {
+    }
+  }
+
+  @Test
+  public void testPortSchema() throws Exception
+  {
+    String[] classFilePath = getClassFileInClasspath();
+    OperatorDiscoverer od = new OperatorDiscoverer(classFilePath);
+    od.buildTypeGraph();
+    JSONObject operatorJson = od.describeOperator(SchemaRequiredOperator.class);
+    JSONArray portsJson = operatorJson.getJSONArray("outputPorts");
+
+    Assert.assertEquals("no. of ports", 3, portsJson.length());
+
+    for (int i = 0; i < portsJson.length(); i++) {
+      JSONObject portJson = portsJson.getJSONObject(i);
+      String name = portJson.getString("name");
+      if (name.equals("output")) {
+        Assert.assertEquals("output schema", true, portJson.getBoolean("schemaRequired"));
+      } else if (name.equals("output1")) {
+        Assert.assertEquals("output1 schema", false, portJson.getBoolean("schemaRequired"));
+      } else if (name.equals("output2")) {
+        Assert.assertEquals("output2 schema", false, portJson.getBoolean("schemaRequired"));
+      }
+    }
+  }
+
+  @Test
+  public void testAdditionalPortInfo() throws Exception
+  {
+    String[] classFilePath = getClassFileInClasspath();
+    OperatorDiscoverer operatorDiscoverer = new OperatorDiscoverer(classFilePath);
+    operatorDiscoverer.buildTypeGraph();
+    JSONObject operator = operatorDiscoverer.describeOperator(SubSubClassGeneric.class);
+
+    JSONObject portClassHierarchy = new JSONObject();
+    JSONObject portsWithSchemaClasses = new JSONObject();
+    operatorDiscoverer.buildAdditionalPortInfo(operator, portClassHierarchy, portsWithSchemaClasses);
+
+    JSONArray stringTypeArray = portClassHierarchy.optJSONArray("java.lang.String");
+    Assert.assertNotNull("string hierarchy", stringTypeArray);
+
+    Assert.assertEquals("number of immediate ancestors", 4, stringTypeArray.length());
+
+    Assert.assertEquals("number of port types with schema", 0, portsWithSchemaClasses.length());
+  }
 }
+

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/61929b58/engine/src/test/resources/schemaTestTopology.json
----------------------------------------------------------------------
diff --git a/engine/src/test/resources/schemaTestTopology.json b/engine/src/test/resources/schemaTestTopology.json
new file mode 100644
index 0000000..6c779fd
--- /dev/null
+++ b/engine/src/test/resources/schemaTestTopology.json
@@ -0,0 +1,43 @@
+{
+  "operators": [
+    {
+      "name": "inputOperator",
+      "class": "com.datatorrent.stram.engine.TestGeneratorInputOperator",
+      "properties": {
+        "com.datatorrent.stram.engine.TestGeneratorInputOperator": {
+          "myConfigProperty": "myConfigPropertyValue"
+        }
+      },
+      "ports": [
+        {
+          "name": "outport",
+          "attributes": {
+            "UNIFIER_LIMIT": 8
+          }
+        }
+      ]
+    },
+    {
+      "name": "operator1",
+      "class": "com.datatorrent.stram.plan.SchemaTestOperator"
+    }
+  ],
+  "streams": [
+    {
+      "name": "inputStream",
+      "source": {
+        "operatorName": "inputOperator",
+        "portName": "outport"
+      },
+      "sinks": [
+        {
+          "operatorName": "operator1",
+          "portName": "schemaRequiredPort"
+        }
+      ],
+      "schema": {
+        "class": "com.datatorrent.stram.plan.LogicalPlanConfigurationTest$TestSchema"
+      }
+    }
+  ]
+}
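
The "schema" entry on the stream above is what populates the sink port's TUPLE_CLASS attribute, as testTupleClassAttr in this commit asserts. A minimal sketch of an operator reading that attribute at setup time; the operator name and field are hypothetical, and the lookup assumes the Context.PortContext.TUPLE_CLASS attribute referenced in the test.

import com.datatorrent.api.Context;
import com.datatorrent.api.DefaultInputPort;
import com.datatorrent.common.util.BaseOperator;

public class TupleClassAwareOperator extends BaseOperator
{
  private transient Class<?> tupleClass;

  public final transient DefaultInputPort<Object> input = new DefaultInputPort<Object>()
  {
    @Override
    public void setup(Context.PortContext context)
    {
      // populated from the stream's "schema.class" value in the JSON above
      tupleClass = context.getValue(Context.PortContext.TUPLE_CLASS);
    }

    @Override
    public void process(Object tuple)
    {
      // tuples arriving here are expected to be instances of tupleClass
    }
  };
}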



[05/50] incubator-apex-core git commit: Merge pull request #106 from vrozov/v3.1.0

Posted by ch...@apache.org.
Merge pull request #106 from vrozov/v3.1.0

APEX-29 #resolve Use DefaultEventLoop.createEventLoop factory

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/a3e9dfa4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/a3e9dfa4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/a3e9dfa4

Branch: refs/heads/master
Commit: a3e9dfa428ff2aa169671027c142e67837cb72f5
Parents: 66a75e0 8ae64ab
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Tue Aug 4 15:54:59 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Tue Aug 4 15:54:59 2015 -0700

----------------------------------------------------------------------
 api/pom.xml                                                 | 2 +-
 .../java/com/datatorrent/bufferserver/server/Server.java    | 2 +-
 .../main/java/com/datatorrent/bufferserver/util/System.java | 2 +-
 .../com/datatorrent/bufferserver/client/SubscriberTest.java | 4 ++--
 .../com/datatorrent/bufferserver/server/ServerTest.java     | 4 ++--
 .../datatorrent/bufferserver/storage/DiskStorageTest.java   | 9 +++++----
 .../com/datatorrent/stram/engine/StreamingContainer.java    | 2 +-
 .../java/com/datatorrent/stram/stream/FastPublisher.java    | 2 ++
 .../java/com/datatorrent/stram/stream/FastStreamTest.java   | 6 ++++--
 .../java/com/datatorrent/stram/stream/SocketStreamTest.java | 8 ++++----
 10 files changed, 23 insertions(+), 18 deletions(-)
----------------------------------------------------------------------
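
The change behind APEX-29 switches callers to the DefaultEventLoop.createEventLoop factory instead of constructing the event loop directly. A minimal sketch of that pattern, assuming createEventLoop(String) in com.datatorrent.netlet may throw IOException and that the loop is driven with start() and stop(); the loop name is illustrative.

import java.io.IOException;

import com.datatorrent.netlet.DefaultEventLoop;

public class EventLoopFactorySketch
{
  public static void main(String[] args) throws IOException
  {
    // obtain the loop through the factory instead of calling a constructor directly
    DefaultEventLoop eventLoop = DefaultEventLoop.createEventLoop("example-loop");
    eventLoop.start();
    try {
      // register server or client connections with eventLoop here
    } finally {
      eventLoop.stop();
    }
  }
}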



[17/50] incubator-apex-core git commit: fixed tests

Posted by ch...@apache.org.
fixed tests


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/1617ca39
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/1617ca39
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/1617ca39

Branch: refs/heads/master
Commit: 1617ca393c1e066349282dfb6d9778aea8c67177
Parents: c5d819b
Author: Gaurav <ga...@datatorrent.com>
Authored: Thu Aug 6 17:31:21 2015 -0700
Committer: Gaurav <ga...@datatorrent.com>
Committed: Thu Aug 6 21:12:51 2015 -0700

----------------------------------------------------------------------
 .../stram/StreamingContainerManagerTest.java        |  7 +++----
 .../datatorrent/stram/engine/AtLeastOnceTest.java   | 16 ++++++++++++++++
 .../stram/engine/StreamingContainerTest.java        |  6 ++++++
 .../datatorrent/stram/stream/OiOEndWindowTest.java  |  5 +++++
 4 files changed, 30 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/1617ca39/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java b/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
index 38a54f0..a238e3e 100644
--- a/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
@@ -740,8 +740,7 @@ public class StreamingContainerManagerTest {
   @Test
   public void testPhysicalPropertyUpdate() throws Exception{
     LogicalPlan dag = new LogicalPlan();
-    String workingDir = new File("target/testPhysicalPropertyUpdate").getAbsolutePath();
-    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null));
     TestGeneratorInputOperator o1 = dag.addOperator("o1", TestGeneratorInputOperator.class);
     GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
     dag.addStream("o1.outport", o1.outport, o2.inport1);
@@ -784,8 +783,7 @@ public class StreamingContainerManagerTest {
 
   private void testAppDataSources(LogicalPlan dag, boolean appendQIDToTopic) throws Exception
   {
-    String workingDir = new File("target/testAppDataSources").getAbsolutePath();
-    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null));
     StramLocalCluster lc = new StramLocalCluster(dag);
     lc.runAsync();
     StreamingContainerManager dnmgr = lc.dnmgr;
@@ -859,6 +857,7 @@ public class StreamingContainerManagerTest {
     try {
       server.start();
       LogicalPlan dag = new LogicalPlan();
+      dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null));
       TestGeneratorInputOperator o1 = dag.addOperator("o1", TestGeneratorInputOperator.class);
       GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
       dag.addStream("o1.outport", o1.outport, o2.inport1);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/1617ca39/engine/src/test/java/com/datatorrent/stram/engine/AtLeastOnceTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/engine/AtLeastOnceTest.java b/engine/src/test/java/com/datatorrent/stram/engine/AtLeastOnceTest.java
index 01cc675..f32be13 100644
--- a/engine/src/test/java/com/datatorrent/stram/engine/AtLeastOnceTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/engine/AtLeastOnceTest.java
@@ -15,6 +15,7 @@
  */
 package com.datatorrent.stram.engine;
 
+import java.io.File;
 import java.io.IOException;
 
 import org.junit.After;
@@ -24,7 +25,10 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.Context;
 import com.datatorrent.api.DAG.Locality;
+
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.stram.StramLocalCluster;
 import com.datatorrent.stram.engine.ProcessingModeTests.CollectorOperator;
 import com.datatorrent.stram.plan.logical.LogicalPlan;
@@ -56,6 +60,10 @@ public class AtLeastOnceTest
     CollectorOperator.collection.clear();
     int maxTuples = 30;
     LogicalPlan dag = new LogicalPlan();
+    String workingDir = new File("target/testInputOperatorRecovery").getAbsolutePath();
+    AsyncFSStorageAgent asyncFSStorageAgent = new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null);
+    asyncFSStorageAgent.setSyncCheckpoint(true);
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, asyncFSStorageAgent);
     dag.getAttributes().put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 2);
     dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);
     dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 1);
@@ -79,6 +87,10 @@ public class AtLeastOnceTest
     CollectorOperator.collection.clear();
     int maxTuples = 30;
     LogicalPlan dag = new LogicalPlan();
+    String workingDir = new File("target/testOperatorRecovery").getAbsolutePath();
+    AsyncFSStorageAgent asyncFSStorageAgent = new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null);
+    asyncFSStorageAgent.setSyncCheckpoint(true);
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, asyncFSStorageAgent);
     dag.getAttributes().put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 2);
     dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);
     dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 1);
@@ -103,6 +115,10 @@ public class AtLeastOnceTest
     CollectorOperator.collection.clear();
     int maxTuples = 30;
     LogicalPlan dag = new LogicalPlan();
+    String workingDir = new File("target/testOperatorRecovery").getAbsolutePath();
+    AsyncFSStorageAgent asyncFSStorageAgent = new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null);
+    asyncFSStorageAgent.setSyncCheckpoint(true);
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, asyncFSStorageAgent);
     //dag.getAttributes().get(DAG.HEARTBEAT_INTERVAL_MILLIS, 400);
     dag.getAttributes().put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 2);
     dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/1617ca39/engine/src/test/java/com/datatorrent/stram/engine/StreamingContainerTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/engine/StreamingContainerTest.java b/engine/src/test/java/com/datatorrent/stram/engine/StreamingContainerTest.java
index 911f69a..7d37429 100644
--- a/engine/src/test/java/com/datatorrent/stram/engine/StreamingContainerTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/engine/StreamingContainerTest.java
@@ -15,6 +15,7 @@
  */
 package com.datatorrent.stram.engine;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -24,7 +25,10 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.BaseOperator;
+
+import com.datatorrent.api.Context;
 import com.datatorrent.api.Context.DAGContext;
 import com.datatorrent.api.InputOperator;
 import com.datatorrent.api.Operator.CheckpointListener;
@@ -42,6 +46,8 @@ public class StreamingContainerTest
   public void testCommitted() throws IOException, ClassNotFoundException
   {
     LogicalPlan lp = new LogicalPlan();
+    String workingDir = new File("target/testCommitted").getAbsolutePath();
+    lp.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
     lp.setAttribute(DAGContext.CHECKPOINT_WINDOW_COUNT, 1);
     CommitAwareOperator operator = lp.addOperator("CommitAwareOperator", new CommitAwareOperator());
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/1617ca39/engine/src/test/java/com/datatorrent/stram/stream/OiOEndWindowTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/stream/OiOEndWindowTest.java b/engine/src/test/java/com/datatorrent/stram/stream/OiOEndWindowTest.java
index 38f7a0b..a4e9c43 100644
--- a/engine/src/test/java/com/datatorrent/stram/stream/OiOEndWindowTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/stream/OiOEndWindowTest.java
@@ -15,6 +15,9 @@
  */
 package com.datatorrent.stram.stream;
 
+import java.io.File;
+
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.api.*;
 import org.junit.Assert;
@@ -93,6 +96,8 @@ public class OiOEndWindowTest
   public void validateOiOImplementation() throws Exception
   {
     LogicalPlan lp = new LogicalPlan();
+    String workingDir = new File("target/validateOiOImplementation").getAbsolutePath();
+    lp.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
     TestInputOperator io = lp.addOperator("Input Operator", new TestInputOperator());
     FirstGenericOperator go = lp.addOperator("First Generic Operator", new FirstGenericOperator());
     SecondGenericOperator out = lp.addOperator("Second Generic Operator", new SecondGenericOperator());


[22/50] incubator-apex-core git commit: APEX-12 #resolve #comment filter BaseOperator from introspection

Posted by ch...@apache.org.
APEX-12 #resolve #comment filter BaseOperator from introspection


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/b2a606bc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/b2a606bc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/b2a606bc

Branch: refs/heads/master
Commit: b2a606bcd76f89af4bd42525670765fce01bccd3
Parents: 3f8f97e
Author: siyuan <si...@datatorrent.com>
Authored: Mon Aug 10 12:52:33 2015 -0700
Committer: siyuan <si...@datatorrent.com>
Committed: Mon Aug 10 12:52:33 2015 -0700

----------------------------------------------------------------------
 .../com/datatorrent/stram/webapp/TypeGraph.java     |  5 ++++-
 .../stram/webapp/OperatorDiscoveryTest.java         | 16 ++++++++++++++--
 2 files changed, 18 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b2a606bc/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java b/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
index 61dc99d..8d7e346 100644
--- a/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
+++ b/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
@@ -38,6 +38,8 @@ import org.slf4j.LoggerFactory;
 
 import com.datatorrent.api.Component;
 import com.datatorrent.api.Operator;
+
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.netlet.util.DTThrowable;
 import com.datatorrent.stram.webapp.asm.ClassNodeType;
 import com.datatorrent.stram.webapp.asm.ClassSignatureVisitor;
@@ -73,7 +75,8 @@ public class TypeGraph
   public static final String[] EXCLUDE_CLASSES = {Object.class.getName().replace('.', '/'), 
     Enum.class.getName().replace('.', '/'), 
     Operator.class.getName().replace('.', '/'),
-    Component.class.getName().replace('.', '/')};
+    Component.class.getName().replace('.', '/'),
+    BaseOperator.class.getName().replace('.', '/')};
 
   public static final ImmutableSet<String> JACKSON_INSTANTIABLE_CLASSES;
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b2a606bc/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java b/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
index 8baa08a..d937ad7 100644
--- a/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
@@ -15,6 +15,9 @@
  */
 package com.datatorrent.stram.webapp;
 
+import java.beans.BeanInfo;
+import java.beans.Introspector;
+import java.beans.PropertyDescriptor;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
@@ -199,7 +202,7 @@ public class OperatorDiscoveryTest
     JSONArray outputPorts = oper.getJSONArray("outputPorts");
 
     Assert.assertNotNull(debug + "Properties aren't null ", props);
-    Assert.assertEquals(debug + "Number of properties ", 5, props.length());
+    Assert.assertEquals(debug + "Number of properties ", 4, props.length());
 
     Assert.assertNotNull(debug + "Port types aren't null ", portTypes);
     Assert.assertEquals(debug + "Number of port types ", 5, portTypes.length());
@@ -285,7 +288,16 @@ public class OperatorDiscoveryTest
 
     JSONArray props = asmDesc.getJSONArray("properties");
     Assert.assertNotNull(debug + "Properties aren't null ", props);
-    Assert.assertEquals(debug + "Number of properties ", 28, props.length());
+    Assert.assertEquals(debug + "Number of properties ", 27, props.length());
+
+    // make sure properties of excluded classes are not in the asm description of the type
+    for(String classN : TypeGraph.EXCLUDE_CLASSES){
+      Class c = Class.forName(classN.replace('/', '.'));
+      BeanInfo bi = Introspector.getBeanInfo(c);
+      for (PropertyDescriptor pd : bi.getPropertyDescriptors()){
+        Assert.assertNull(debug, getJSONProperty(props, pd.getName()));
+      }
+    }
 
     JSONObject mapProperty = getJSONProperty(props, "map");
     Assert.assertEquals(debug + "canGet " + mapProperty, true, mapProperty.get("canGet"));
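
The loop above verifies the exclusion by cross-checking the java.beans view of each excluded class. For comparison, a minimal sketch of the standard-library stop-class idiom, which achieves for java.beans what adding BaseOperator to EXCLUDE_CLASSES achieves for the ASM-based type graph; the sample operator and its property are hypothetical.

import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;

import com.datatorrent.common.util.BaseOperator;

public class StopClassIntrospection
{
  public static void main(String[] args) throws IntrospectionException
  {
    // properties declared on BaseOperator and above (such as "name") are not reported
    BeanInfo info = Introspector.getBeanInfo(SampleOperator.class, BaseOperator.class);
    for (PropertyDescriptor pd : info.getPropertyDescriptors()) {
      System.out.println(pd.getName());
    }
  }

  // hypothetical operator with one property of its own
  public static class SampleOperator extends BaseOperator
  {
    private int threshold;

    public int getThreshold()
    {
      return threshold;
    }

    public void setThreshold(int threshold)
    {
      this.threshold = threshold;
    }
  }
}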


[38/50] incubator-apex-core git commit: SPOI-6002: NPE while finding if a port type has schema classes

Posted by ch...@apache.org.
SPOI-6002: NPE while finding if a port type has schema classes


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/e914fc9a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/e914fc9a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/e914fc9a

Branch: refs/heads/master
Commit: e914fc9ae8cc0389554c1cbbf69780130e963686
Parents: 9d08532
Author: Chandni Singh <ch...@datatorrent.com>
Authored: Thu Aug 20 13:42:31 2015 -0700
Committer: Chandni Singh <ch...@datatorrent.com>
Committed: Thu Aug 20 13:42:31 2015 -0700

----------------------------------------------------------------------
 .../stram/webapp/OperatorDiscoverer.java           | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/e914fc9a/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java b/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
index 0867b03..b35efe6 100644
--- a/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
+++ b/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
@@ -808,14 +808,17 @@ public class OperatorDiscoverer
           continue;
         }
         boolean hasSchemaClasses = false;
-        for (String descendant : typeGraph.getInstantiableDescendants(portType)) {
-          try {
-            if (typeGraph.isInstantiableBean(descendant)) {
-              hasSchemaClasses = true;
-              break;
+        List<String> instantiableDescendants = typeGraph.getInstantiableDescendants(portType);
+        if (instantiableDescendants != null) {
+          for (String descendant : instantiableDescendants) {
+            try {
+              if (typeGraph.isInstantiableBean(descendant)) {
+                hasSchemaClasses = true;
+                break;
+              }
+            } catch (JSONException ex) {
+              LOG.warn("checking descendant is instantiable {}", descendant);
             }
-          } catch (JSONException ex) {
-            LOG.warn("checking descendant is instantiable {}", descendant);
           }
         }
         portTypesWithSchemaClasses.put(portType, hasSchemaClasses);
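
The fix above guards against getInstantiableDescendants returning null. A minimal alternative sketch of the same guard, normalizing to an empty list before iterating; the class, method names, and the stand-in bean check are all hypothetical.

import java.util.Collections;
import java.util.List;

public class NullSafeDescendants
{
  // stands in for typeGraph.getInstantiableDescendants(portType), which may return null
  static List<String> descendantsOrNull(String portType)
  {
    return null;
  }

  static boolean hasSchemaClasses(String portType)
  {
    List<String> descendants = descendantsOrNull(portType);
    if (descendants == null) {
      descendants = Collections.<String>emptyList();
    }
    for (String descendant : descendants) {
      // in OperatorDiscoverer this check is typeGraph.isInstantiableBean(descendant)
      if (descendant.contains("Schema")) {
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args)
  {
    System.out.println(hasSchemaClasses("java.lang.Object"));
  }
}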


[06/50] incubator-apex-core git commit: writing checkpoints async

Posted by ch...@apache.org.
writing checkpoints async


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/29eb6c37
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/29eb6c37
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/29eb6c37

Branch: refs/heads/master
Commit: 29eb6c377e92242c540d9a4d8be43a1fe05b7ac2
Parents: 66a75e0
Author: Gaurav <ga...@datatorrent.com>
Authored: Thu Jul 30 11:15:24 2015 -0700
Committer: Gaurav <ga...@datatorrent.com>
Committed: Tue Aug 4 16:28:17 2015 -0700

----------------------------------------------------------------------
 .../common/util/AsyncFSStorageAgent.java        | 111 ++++++++++++++++
 .../datatorrent/common/util/FSStorageAgent.java |   3 +-
 .../common/codec/JsonStreamCodecTest.java       |  15 ++-
 .../common/util/AsyncFSStorageAgentTest.java    | 133 +++++++++++++++++++
 .../java/com/datatorrent/stram/StramClient.java |   5 +-
 .../datatorrent/stram/StramLocalCluster.java    |   4 +-
 .../stram/StreamingAppMasterService.java        |   2 +-
 .../stram/StreamingContainerManager.java        |  10 +-
 .../java/com/datatorrent/stram/engine/Node.java |  64 ++++++++-
 .../stram/plan/physical/PhysicalPlan.java       |   8 +-
 .../com/datatorrent/stram/CheckpointTest.java   |  11 +-
 .../stram/LogicalPlanModificationTest.java      |  22 ++-
 .../com/datatorrent/stram/PartitioningTest.java |  26 +++-
 .../stram/StramLocalClusterTest.java            |  22 ++-
 .../datatorrent/stram/StramMiniClusterTest.java |   9 +-
 .../datatorrent/stram/StramRecoveryTest.java    |  56 ++++++--
 .../stram/StreamingContainerManagerTest.java    |  45 ++++++-
 .../stram/debug/TupleRecorderTest.java          |   3 +
 .../stram/engine/AutoMetricTest.java            |   2 +
 .../stram/engine/InputOperatorTest.java         |   5 +-
 .../stram/engine/ProcessingModeTests.java       |   9 ++
 .../datatorrent/stram/engine/SliderTest.java    |   5 +
 .../com/datatorrent/stram/engine/StatsTest.java |  10 +-
 .../stram/engine/WindowGeneratorTest.java       |  11 +-
 .../stram/webapp/StramWebServicesTest.java      |   6 +-
 25 files changed, 527 insertions(+), 70 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java b/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
new file mode 100644
index 0000000..d5de61c
--- /dev/null
+++ b/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
@@ -0,0 +1,111 @@
+/**
+ * Copyright (C) 2015 DataTorrent, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *         http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.datatorrent.common.util;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.ObjectStreamException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Options;
+import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class AsyncFSStorageAgent extends FSStorageAgent
+{
+  private final transient FileSystem fs;
+  private final transient Configuration conf;
+  private final String localBasePath;
+
+  private boolean syncCheckpoint = false;
+
+  private AsyncFSStorageAgent()
+  {
+    super();
+    fs = null;
+    conf = null;
+    localBasePath = null;
+  }
+
+  public AsyncFSStorageAgent(String path, Configuration conf)
+  {
+    this(".", path, conf);
+  }
+
+  public AsyncFSStorageAgent(String localBasePath, String path, Configuration conf)
+  {
+    super(path, conf);
+    if (localBasePath == null) {
+      this.localBasePath = "/tmp";
+    }
+    else {
+      this.localBasePath = localBasePath;
+    }
+    logger.debug("Initialize storage agent with {}.", this.localBasePath);
+    this.conf = conf == null ? new Configuration() : conf;
+    try {
+      fs = FileSystem.newInstance(this.conf);
+    } catch (IOException ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  @Override
+  public void save(final Object object, final int operatorId, final long windowId) throws IOException
+  {
+    String operatorIdStr = String.valueOf(operatorId);
+    File directory = new File(localBasePath, operatorIdStr);
+    if (!directory.exists()) {
+      directory.mkdirs();
+    }
+    try (FileOutputStream stream = new FileOutputStream(new File(directory, String.valueOf(windowId)))) {
+      store(stream, object);
+    }
+  }
+
+  public void copyToHDFS(final int operatorId, final long windowId) throws IOException
+  {
+    String operatorIdStr = String.valueOf(operatorId);
+    File directory = new File(localBasePath, operatorIdStr);
+    String window = Long.toHexString(windowId);
+    Path lPath = new Path(path + Path.SEPARATOR + operatorIdStr + Path.SEPARATOR + System.currentTimeMillis() + TMP_FILE);
+    FileUtil.copy(new File(directory, String.valueOf(windowId)), fs, lPath, true, conf);
+    fileContext.rename(lPath, new Path(path + Path.SEPARATOR + operatorIdStr + Path.SEPARATOR + window), Options.Rename.OVERWRITE);
+  }
+
+  @Override
+  public Object readResolve() throws ObjectStreamException
+  {
+    return new AsyncFSStorageAgent(this.localBasePath, this.path, null);
+  }
+
+  public boolean isSyncCheckpoint()
+  {
+    return syncCheckpoint;
+  }
+
+  public void setSyncCheckpoint(boolean syncCheckpoint)
+  {
+    this.syncCheckpoint = syncCheckpoint;
+  }
+
+  private static final long serialVersionUID = 201507241610L;
+  private static final Logger logger = LoggerFactory.getLogger(AsyncFSStorageAgent.class);
+}
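
A minimal sketch of driving the new agent directly, along the lines of AsyncFSStorageAgentTest later in this commit: save() stages the serialized state locally and copyToHDFS() promotes it to the checkpoint path, which is the step the engine defers to a background thread. The paths, operator id, and window id here are illustrative.

import java.io.IOException;

import com.datatorrent.common.util.AsyncFSStorageAgent;

public class AsyncCheckpointSketch
{
  public static void main(String[] args) throws IOException
  {
    // local staging directory and checkpoint path are illustrative
    AsyncFSStorageAgent agent = new AsyncFSStorageAgent("target/local", "target/app/checkpoints", null);

    // save() only serializes the state to the local base path ...
    agent.save("operator state", 1, 10L);

    // ... copyToHDFS() then moves it to the checkpoint path under <operatorId>/<hex windowId>
    agent.copyToHDFS(1, 10L);

    // load() reads it back through the regular FSStorageAgent path
    Object restored = agent.load(1, 10L);
    System.out.println(restored);
  }
}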

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/common/src/main/java/com/datatorrent/common/util/FSStorageAgent.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/com/datatorrent/common/util/FSStorageAgent.java b/common/src/main/java/com/datatorrent/common/util/FSStorageAgent.java
index 31b537d..14275fa 100644
--- a/common/src/main/java/com/datatorrent/common/util/FSStorageAgent.java
+++ b/common/src/main/java/com/datatorrent/common/util/FSStorageAgent.java
@@ -52,8 +52,7 @@ public class FSStorageAgent implements StorageAgent, Serializable
     kryo = new Kryo();
   }
 
-  @SuppressWarnings("unused")
-  private FSStorageAgent()
+  protected FSStorageAgent()
   {
     path = null;
     fileContext = null;

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/common/src/test/java/com/datatorrent/common/codec/JsonStreamCodecTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/com/datatorrent/common/codec/JsonStreamCodecTest.java b/common/src/test/java/com/datatorrent/common/codec/JsonStreamCodecTest.java
index e0a5f01..a9303bc 100644
--- a/common/src/test/java/com/datatorrent/common/codec/JsonStreamCodecTest.java
+++ b/common/src/test/java/com/datatorrent/common/codec/JsonStreamCodecTest.java
@@ -1,14 +1,17 @@
 /**
  * Copyright (C) 2015 DataTorrent, Inc.
  *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *         http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
- * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 package com.datatorrent.common.codec;
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java b/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java
new file mode 100644
index 0000000..e7f9f66
--- /dev/null
+++ b/common/src/test/java/com/datatorrent/common/util/AsyncFSStorageAgentTest.java
@@ -0,0 +1,133 @@
+/**
+ * Copyright (C) 2015 DataTorrent, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *         http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.datatorrent.common.util;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.Path;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestWatcher;
+import org.junit.runner.Description;
+
+import com.google.common.collect.Maps;
+
+import com.datatorrent.api.Attribute;
+import com.datatorrent.api.DAG;
+
+public class AsyncFSStorageAgentTest
+{
+  private static class TestMeta extends TestWatcher
+  {
+    String applicationPath;
+    String basePath;
+    AsyncFSStorageAgent storageAgent;
+
+    @Override
+    protected void starting(Description description)
+    {
+      super.starting(description);
+      basePath = "target/" + description.getClassName() + "/" + description.getMethodName();
+      applicationPath = basePath + "/app";
+      try {
+        FileUtils.forceMkdir(new File(basePath));
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+      storageAgent = new AsyncFSStorageAgent(basePath, applicationPath, null);
+
+      Attribute.AttributeMap.DefaultAttributeMap attributes = new Attribute.AttributeMap.DefaultAttributeMap();
+      attributes.put(DAG.APPLICATION_PATH, applicationPath);
+    }
+
+    @Override
+    protected void finished(Description description)
+    {
+      try {
+        FileUtils.deleteDirectory(new File("target/" + description.getClassName()));
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+
+  @Rule
+  public TestMeta testMeta = new TestMeta();
+
+  @Test
+  public void testSave() throws IOException
+  {
+    Map<Integer, String> data = Maps.newHashMap();
+    data.put(1, "one");
+    data.put(2, "two");
+    data.put(3, "three");
+    testMeta.storageAgent.save(data, 1, 1);
+    testMeta.storageAgent.copyToHDFS(1, 1);
+    @SuppressWarnings("unchecked")
+    Map<Integer, String> decoded = (Map<Integer, String>) testMeta.storageAgent.load(1, 1);
+    Assert.assertEquals("dataOf1", data, decoded);
+  }
+
+  @Test
+  public void testLoad() throws IOException
+  {
+    Map<Integer, String> dataOf1 = Maps.newHashMap();
+    dataOf1.put(1, "one");
+    dataOf1.put(2, "two");
+    dataOf1.put(3, "three");
+
+    Map<Integer, String> dataOf2 = Maps.newHashMap();
+    dataOf2.put(4, "four");
+    dataOf2.put(5, "five");
+    dataOf2.put(6, "six");
+
+    testMeta.storageAgent.save(dataOf1, 1, 1);
+    testMeta.storageAgent.copyToHDFS(1, 1);
+    testMeta.storageAgent.save(dataOf2, 2, 1);
+    testMeta.storageAgent.copyToHDFS(2, 1);
+    @SuppressWarnings("unchecked")
+    Map<Integer, String> decoded1 = (Map<Integer, String>) testMeta.storageAgent.load(1, 1);
+
+    @SuppressWarnings("unchecked")
+    Map<Integer, String> decoded2 = (Map<Integer, String>) testMeta.storageAgent.load(2, 1);
+    Assert.assertEquals("data of 1", dataOf1, decoded1);
+    Assert.assertEquals("data of 2", dataOf2, decoded2);
+  }
+
+  @Test
+  public void testRecovery() throws IOException
+  {
+    testSave();
+    testMeta.storageAgent = new AsyncFSStorageAgent(testMeta.basePath, testMeta.applicationPath, null);
+    testSave();
+  }
+
+  @Test
+  public void testDelete() throws IOException
+  {
+    testLoad();
+    testMeta.storageAgent.delete(1, 1);
+    Path appPath = new Path(testMeta.applicationPath);
+    FileContext fileContext = FileContext.getFileContext();
+    Assert.assertTrue("operator 2 window 1", fileContext.util().exists(new Path(appPath + "/" + 2 + "/" + 1)));
+    Assert.assertFalse("operator 1 window 1", fileContext.util().exists(new Path(appPath + "/" + 1 + "/" + 1)));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/main/java/com/datatorrent/stram/StramClient.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StramClient.java b/engine/src/main/java/com/datatorrent/stram/StramClient.java
index 657f678..dfb4511 100644
--- a/engine/src/main/java/com/datatorrent/stram/StramClient.java
+++ b/engine/src/main/java/com/datatorrent/stram/StramClient.java
@@ -52,7 +52,7 @@ import org.apache.hadoop.yarn.util.Records;
 import org.apache.log4j.DTLoggerFactory;
 
 import com.datatorrent.api.Context.OperatorContext;
-
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.BasicContainerOptConfigurator;
 import com.datatorrent.common.util.FSStorageAgent;
 import com.datatorrent.stram.client.StramClientUtils;
@@ -456,8 +456,9 @@ public class StramClient
       if (dag.getAttributes().get(OperatorContext.STORAGE_AGENT) == null) { /* which would be the most likely case */
         Path checkpointPath = new Path(appPath, LogicalPlan.SUBDIR_CHECKPOINTS);
         // use conf client side to pickup any proxy settings from dt-site.xml
-        dag.setAttribute(OperatorContext.STORAGE_AGENT, new FSStorageAgent(checkpointPath.toString(), conf));
+        dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(checkpointPath.toString(), conf));
       }
+
       if(dag.getAttributes().get(LogicalPlan.CONTAINER_OPTS_CONFIGURATOR) == null){
         dag.setAttribute(LogicalPlan.CONTAINER_OPTS_CONFIGURATOR,new BasicContainerOptConfigurator());
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/main/java/com/datatorrent/stram/StramLocalCluster.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StramLocalCluster.java b/engine/src/main/java/com/datatorrent/stram/StramLocalCluster.java
index c7ac0cb..e28c097 100644
--- a/engine/src/main/java/com/datatorrent/stram/StramLocalCluster.java
+++ b/engine/src/main/java/com/datatorrent/stram/StramLocalCluster.java
@@ -35,11 +35,13 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.ipc.ProtocolSignature;
 import org.apache.hadoop.net.NetUtils;
 
+import com.datatorrent.api.Context;
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.LocalMode.Controller;
 import com.datatorrent.api.Operator;
 import com.datatorrent.bufferserver.server.Server;
 import com.datatorrent.bufferserver.storage.DiskStorage;
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.FSStorageAgent;
 import com.datatorrent.stram.StreamingContainerAgent.ContainerStartRequest;
 import com.datatorrent.stram.StreamingContainerManager.ContainerResource;
@@ -298,7 +300,7 @@ public class StramLocalCluster implements Runnable, Controller
       dag.getAttributes().put(LogicalPlan.APPLICATION_PATH, pathUri);
     }
     if (dag.getAttributes().get(OperatorContext.STORAGE_AGENT) == null) {
-      dag.setAttribute(OperatorContext.STORAGE_AGENT, new FSStorageAgent(new Path(pathUri, LogicalPlan.SUBDIR_CHECKPOINTS).toString(), null));
+      dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(new Path(pathUri, LogicalPlan.SUBDIR_CHECKPOINTS).toString(), null));
     }
     this.dnmgr = new StreamingContainerManager(dag);
     this.umbilical = new UmbilicalProtocolLocalImpl();

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java b/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
index dbb3d11..5246c9e 100644
--- a/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
+++ b/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
@@ -5,7 +5,7 @@
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *         http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
index 95f4648..0847f3c 100644
--- a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
+++ b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
@@ -76,6 +76,7 @@ import com.datatorrent.api.annotation.Stateless;
 import com.datatorrent.bufferserver.auth.AuthManager;
 import com.datatorrent.bufferserver.util.Codec;
 import com.datatorrent.common.experimental.AppData;
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.FSStorageAgent;
 import com.datatorrent.common.util.NumberAggregate;
 import com.datatorrent.common.util.Pair;
@@ -2949,7 +2950,14 @@ public class StreamingContainerManager implements PlanContext
 
       this.finals = new FinalVars(finals, lp);
       StorageAgent sa = lp.getValue(OperatorContext.STORAGE_AGENT);
-      if (sa instanceof FSStorageAgent) {
+      if(sa instanceof AsyncFSStorageAgent){
+        // replace the default storage agent, if present
+        AsyncFSStorageAgent fssa = (AsyncFSStorageAgent) sa;
+        if (fssa.path.contains(oldAppId)) {
+          fssa = new AsyncFSStorageAgent(fssa.path.replace(oldAppId, appId), conf);
+          lp.setAttribute(OperatorContext.STORAGE_AGENT, fssa);
+        }
+      } else if (sa instanceof FSStorageAgent) {
         // replace the default storage agent, if present
         FSStorageAgent fssa = (FSStorageAgent) sa;
         if (fssa.path.contains(oldAppId)) {

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/main/java/com/datatorrent/stram/engine/Node.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/engine/Node.java b/engine/src/main/java/com/datatorrent/stram/engine/Node.java
index 24679dc..ea33970 100644
--- a/engine/src/main/java/com/datatorrent/stram/engine/Node.java
+++ b/engine/src/main/java/com/datatorrent/stram/engine/Node.java
@@ -27,8 +27,7 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.util.*;
 import java.util.Map.Entry;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.*;
 
 import org.apache.hadoop.util.ReflectionUtils;
 import org.slf4j.Logger;
@@ -46,6 +45,9 @@ import com.datatorrent.api.Operator.Unifier;
 import com.datatorrent.api.StatsListener.OperatorRequest;
 
 import com.datatorrent.bufferserver.util.Codec;
+import com.datatorrent.common.util.AsyncFSStorageAgent;
+import com.datatorrent.common.util.Pair;
+import com.datatorrent.netlet.util.DTThrowable;
 import com.datatorrent.stram.api.Checkpoint;
 import com.datatorrent.stram.api.OperatorDeployInfo;
 import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.ContainerStats;
@@ -99,12 +101,16 @@ public abstract class Node<OPERATOR extends Operator> implements Component<Opera
   public final BlockingQueue<StatsListener.OperatorResponse> commandResponse;
   private final List<Field> metricFields;
   private final Map<String, Method> metricMethods;
+  private ExecutorService executorService;
+  private Queue<Pair<FutureTask<Stats.CheckpointStats>, Long>> taskQueue;
   protected Stats.CheckpointStats checkpointStats;
 
   public Node(OPERATOR operator, OperatorContext context)
   {
     this.operator = operator;
     this.context = context;
+    executorService = Executors.newSingleThreadExecutor();
+    taskQueue = new LinkedList<Pair<FutureTask<Stats.CheckpointStats>, Long>>();
 
     outputs = new HashMap<String, Sink<Object>>();
 
@@ -173,6 +179,9 @@ public abstract class Node<OPERATOR extends Operator> implements Component<Opera
       pcpair.component.teardown();
     }
 
+    if (executorService != null) {
+      executorService.shutdownNow();
+    }
     operator.teardown();
   }
 
@@ -405,6 +414,21 @@ public abstract class Node<OPERATOR extends Operator> implements Component<Opera
       checkpointStats = null;
       checkpoint = null;
     }
+    else {
+      Pair<FutureTask<Stats.CheckpointStats>, Long> pair = taskQueue.peek();
+      if (pair != null && pair.getFirst().isDone()) {
+        taskQueue.poll();
+        try {
+          stats.checkpointStats = pair.getFirst().get();
+          stats.checkpoint = new Checkpoint(pair.getSecond(), applicationWindowCount, checkpointWindowCount);
+          if (operator instanceof Operator.CheckpointListener) {
+            ((Operator.CheckpointListener) operator).checkpointed(pair.getSecond());
+          }
+        } catch (Exception ex) {
+          throw DTThrowable.wrapIfChecked(ex);
+        }
+      }
+    }
 
     context.report(stats, windowId);
   }
@@ -440,6 +464,25 @@ public abstract class Node<OPERATOR extends Operator> implements Component<Opera
           checkpointStats = new Stats.CheckpointStats();
           checkpointStats.checkpointStartTime = System.currentTimeMillis();
           ba.save(operator, id, windowId);
+          if (ba instanceof AsyncFSStorageAgent) {
+            AsyncFSStorageAgent asyncFSStorageAgent = (AsyncFSStorageAgent) ba;
+            if (!asyncFSStorageAgent.isSyncCheckpoint() && PROCESSING_MODE != ProcessingMode.EXACTLY_ONCE) {
+              CheckpointHandler checkpointHandler = new CheckpointHandler();
+              checkpointHandler.agent = asyncFSStorageAgent;
+              checkpointHandler.operatorId = id;
+              checkpointHandler.windowId = windowId;
+              checkpointHandler.stats = checkpointStats;
+              FutureTask<Stats.CheckpointStats> futureTask = new FutureTask<Stats.CheckpointStats>(checkpointHandler);
+              taskQueue.add(new Pair<FutureTask<Stats.CheckpointStats>, Long>(futureTask, windowId));
+              executorService.submit(futureTask);
+              checkpoint = null;
+              checkpointStats = null;
+              return;
+            }
+            else {
+              asyncFSStorageAgent.copyToHDFS(id, windowId);
+            }
+          }
           checkpointStats.checkpointTime = System.currentTimeMillis() - checkpointStats.checkpointStartTime;
         }
         catch (IOException ie) {
@@ -570,5 +613,22 @@ public abstract class Node<OPERATOR extends Operator> implements Component<Opera
     deactivateSinks();
   }
 
+  private class CheckpointHandler implements Callable<Stats.CheckpointStats>
+  {
+
+    public AsyncFSStorageAgent agent;
+    public int operatorId;
+    public long windowId;
+    public Stats.CheckpointStats stats;
+
+    @Override
+    public Stats.CheckpointStats call() throws Exception
+    {
+      agent.copyToHDFS(id, windowId);
+      stats.checkpointTime = System.currentTimeMillis() - stats.checkpointStartTime;
+      return stats;
+    }
+  }
+
   private static final Logger logger = LoggerFactory.getLogger(Node.class);
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java b/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
index 7c0432d..5b90c04 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/physical/PhysicalPlan.java
@@ -40,6 +40,8 @@ import com.datatorrent.api.Partitioner.Partition;
 import com.datatorrent.api.Partitioner.PartitionKeys;
 import com.datatorrent.api.StatsListener.OperatorRequest;
 import com.datatorrent.api.annotation.Stateless;
+
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.stram.Journal.Recoverable;
 import com.datatorrent.stram.api.Checkpoint;
 import com.datatorrent.stram.api.StramEvent;
@@ -941,7 +943,11 @@ public class PhysicalPlan implements Serializable
     try {
       LOG.debug("Writing activation checkpoint {} {} {}", checkpoint, oper, oo);
       long windowId = oper.isOperatorStateLess() ? Stateless.WINDOW_ID : checkpoint.windowId;
-      oper.operatorMeta.getValue(OperatorContext.STORAGE_AGENT).save(oo, oper.id, windowId);
+      StorageAgent agent = oper.operatorMeta.getValue(OperatorContext.STORAGE_AGENT);
+      agent.save(oo, oper.id, windowId);
+      if (agent instanceof AsyncFSStorageAgent) {
+        ((AsyncFSStorageAgent) agent).copyToHDFS(oper.id, windowId);
+      }
     } catch (IOException e) {
       // inconsistent state, no recovery option, requires shutdown
       throw new IllegalStateException("Failed to write operator state after partition change " + oper, e);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/CheckpointTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/CheckpointTest.java b/engine/src/test/java/com/datatorrent/stram/CheckpointTest.java
index dd804ec..4072894 100644
--- a/engine/src/test/java/com/datatorrent/stram/CheckpointTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/CheckpointTest.java
@@ -37,6 +37,7 @@ import com.datatorrent.api.DAG.Locality;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 import com.datatorrent.api.annotation.Stateless;
 
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.stram.MockContainer.MockOperatorStats;
 import com.datatorrent.stram.StreamingContainerManager.UpdateCheckpointsContext;
 import com.datatorrent.stram.api.Checkpoint;
@@ -111,6 +112,9 @@ public class CheckpointTest
   {
     LogicalPlan dag = new LogicalPlan();
     dag.setAttribute(LogicalPlan.APPLICATION_PATH, testMeta.dir);
+    AsyncFSStorageAgent storageAgent = new AsyncFSStorageAgent(testMeta.dir + "/locaPath", testMeta.dir, null);
+    storageAgent.setSyncCheckpoint(true);
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, storageAgent);
     dag.setAttribute(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 1);
     dag.setAttribute(LogicalPlan.HEARTBEAT_INTERVAL_MILLIS, 50);
     dag.setAttribute(LogicalPlan.CONTAINERS_MAX_COUNT, 1);
@@ -127,14 +131,13 @@ public class CheckpointTest
     sc.setHeartbeatMonitoringEnabled(false);
     sc.run();
 
-    StorageAgent fssa = sc.getDAG().getValue(OperatorContext.STORAGE_AGENT);
     StreamingContainerManager dnm = sc.dnmgr;
     PhysicalPlan plan = dnm.getPhysicalPlan();
     Assert.assertEquals("number required containers", 1, dnm.getPhysicalPlan().getContainers().size());
 
     PTOperator o1p1 = plan.getOperators(dag.getMeta(o1)).get(0);
     Set<Long> checkpoints = Sets.newHashSet();
-    for (long windowId : fssa.getWindowIds(o1p1.getId())) {
+    for (long windowId : storageAgent.getWindowIds(o1p1.getId())) {
       checkpoints.add(windowId);
     }
     Assert.assertEquals("number checkpoints " + checkpoints, 3, checkpoints.size());
@@ -142,7 +145,7 @@ public class CheckpointTest
 
     PTOperator o2p1 = plan.getOperators(dag.getMeta(o2)).get(0);
     checkpoints = Sets.newHashSet();
-    for (long windowId : fssa.getWindowIds(o2p1.getId())) {
+    for (long windowId : storageAgent.getWindowIds(o2p1.getId())) {
       checkpoints.add(windowId);
     }
     Assert.assertEquals("number checkpoints " + checkpoints, 1, checkpoints.size());
@@ -152,7 +155,7 @@ public class CheckpointTest
     Assert.assertNotNull("checkpoint not null for statefull operator " + o1p1, o1p1.stats.checkpointStats);
 
     for (Checkpoint cp : o1p1.checkpoints) {
-      Object load = fssa.load(o1p1.getId(), cp.windowId);
+      Object load = storageAgent.load(o1p1.getId(), cp.windowId);
       Assert.assertEquals("Stored Operator and Saved State", load.getClass(), o1p1.getOperatorMeta().getOperator().getClass());
     }
   }

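The test changes above all follow the same wiring: construct AsyncFSStorageAgent with a local staging path and the application (DFS) path, optionally force synchronous copying, and register it as the DAG's storage agent. A sketch of that setup, assuming the three-argument constructor shown in these tests (local dir, application dir, Configuration passed as null) and that setSyncCheckpoint(true) makes save() copy to the application path immediately:

    import com.datatorrent.api.Context;
    import com.datatorrent.common.util.AsyncFSStorageAgent;
    import com.datatorrent.stram.plan.logical.LogicalPlan;

    public class SyncCheckpointWiringSketch
    {
      static LogicalPlan newDagWithSyncCheckpointing(String baseDir)
      {
        LogicalPlan dag = new LogicalPlan();
        // local staging dir, application (DFS) dir, null Configuration, as in the tests above
        AsyncFSStorageAgent agent = new AsyncFSStorageAgent(baseDir + "/localPath", baseDir, null);
        agent.setSyncCheckpoint(true);
        dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, agent);
        return dag;
      }
    }
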
http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/LogicalPlanModificationTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/LogicalPlanModificationTest.java b/engine/src/test/java/com/datatorrent/stram/LogicalPlanModificationTest.java
index db1d9ec..78a1bd8 100644
--- a/engine/src/test/java/com/datatorrent/stram/LogicalPlanModificationTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/LogicalPlanModificationTest.java
@@ -27,9 +27,10 @@ import org.junit.Rule;
 import org.junit.Test;
 
 import com.datatorrent.api.DAG.Locality;
+import com.datatorrent.api.StorageAgent;
 
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.FSStorageAgent;
-import com.datatorrent.stram.StreamingContainerManager;
 import com.datatorrent.stram.engine.GenericTestOperator;
 import com.datatorrent.stram.engine.OperatorContext;
 import com.datatorrent.stram.engine.TestGeneratorInputOperator;
@@ -291,15 +292,14 @@ public class LogicalPlanModificationTest
 
   }
 
-  @Test
-  public void testExecutionManager() throws Exception {
+  private void testExecutionManager(StorageAgent agent) throws Exception {
 
     LogicalPlan dag = new LogicalPlan();
     dag.setAttribute(com.datatorrent.api.Context.DAGContext.APPLICATION_PATH, testMeta.dir);
-    dag.setAttribute(OperatorContext.STORAGE_AGENT, new FSStorageAgent(testMeta.dir, null));
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, agent);
 
     StreamingContainerManager dnm = new StreamingContainerManager(dag);
-    Assert.assertEquals(""+dnm.containerStartRequests, dnm.containerStartRequests.size(), 0);
+    Assert.assertEquals("" + dnm.containerStartRequests, dnm.containerStartRequests.size(), 0);
 
 
     CreateOperatorRequest cor = new CreateOperatorRequest();
@@ -331,4 +331,16 @@ public class LogicalPlanModificationTest
 
   }
 
+  @Test
+  public void testExecutionManagerWithSyncStorageAgent() throws Exception
+  {
+    testExecutionManager(new FSStorageAgent(testMeta.dir, null));
+  }
+
+  @Test
+  public void testExecutionManagerWithAsyncStorageAgent() throws Exception
+  {
+    testExecutionManager(new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null));
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/PartitioningTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/PartitioningTest.java b/engine/src/test/java/com/datatorrent/stram/PartitioningTest.java
index 9c169ee..15ad76e 100644
--- a/engine/src/test/java/com/datatorrent/stram/PartitioningTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/PartitioningTest.java
@@ -27,13 +27,13 @@ import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import com.google.common.collect.Sets;
+
 import com.datatorrent.api.*;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.annotation.OutputPortFieldAnnotation;
 import com.datatorrent.common.partitioner.StatelessPartitioner;
-import com.datatorrent.common.util.FSStorageAgent;
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.stram.StramLocalCluster.LocalStreamingContainer;
 import com.datatorrent.stram.api.Checkpoint;
 import com.datatorrent.stram.engine.Node;
@@ -150,6 +150,8 @@ public class PartitioningTest
   public void testDefaultPartitioning() throws Exception
   {
     LogicalPlan dag = new LogicalPlan();
+    File checkpointDir = new File(TEST_OUTPUT_DIR, "testDefaultPartitioning");
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(checkpointDir.getPath() + "/localPath", checkpointDir.getPath(), null));
 
     Integer[][] testData = {
       {4, 5}
@@ -249,6 +251,9 @@ public class PartitioningTest
   {
     LogicalPlan dag = new LogicalPlan();
     dag.setAttribute(LogicalPlan.CONTAINERS_MAX_COUNT, 5);
+    File checkpointDir = new File(TEST_OUTPUT_DIR, "testDynamicDefaultPartitioning");
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(checkpointDir.getPath() + "/localPath", checkpointDir.getPath(), null));
+
     CollectorOperator.receivedTuples.clear();
 
     TestInputOperator<Integer> input = dag.addOperator("input", new TestInputOperator<Integer>());
@@ -391,12 +396,12 @@ public class PartitioningTest
      *
      * @throws Exception
      */
-    @Test
-    public void testInputOperatorPartitioning() throws Exception
+
+    private void testInputOperatorPartitioning(LogicalPlan dag) throws Exception
     {
       File checkpointDir = new File(TEST_OUTPUT_DIR, "testInputOperatorPartitioning");
-      LogicalPlan dag = new LogicalPlan();
       dag.getAttributes().put(LogicalPlan.APPLICATION_PATH, checkpointDir.getPath());
+      dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(checkpointDir.getPath() + "/localPath", checkpointDir.getPath(), null));
 
       PartitionableInputOperator input = dag.addOperator("input", new PartitionableInputOperator());
       dag.setAttribute(input, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{new PartitionLoadWatch()}));
@@ -418,7 +423,10 @@ public class PartitioningTest
         Checkpoint checkpoint = new Checkpoint(10L, 0, 0);
         p.checkpoints.add(checkpoint);
         p.setRecoveryCheckpoint(checkpoint);
-        new FSStorageAgent(checkpointDir.getPath(), null).save(inputDeployed, p.getId(), 10L);
+        AsyncFSStorageAgent agent = new AsyncFSStorageAgent(checkpointDir.getPath() + "/localPath", checkpointDir.getPath(), null);
+        agent.save(inputDeployed, p.getId(), 10L);
+        agent.copyToHDFS(p.getId(), 10l);
+
       }
 
       Assert.assertEquals("", Sets.newHashSet("partition_0", "partition_1", "partition_2"), partProperties);
@@ -447,6 +455,12 @@ public class PartitioningTest
 
     }
 
+    @Test
+    public void testInputOperatorPartitioningWithAsyncStorageAgent() throws Exception
+    {
+      LogicalPlan dag = new LogicalPlan();
+      testInputOperatorPartitioning(dag);
+    }
   }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/StramLocalClusterTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/StramLocalClusterTest.java b/engine/src/test/java/com/datatorrent/stram/StramLocalClusterTest.java
index 8489c70..1881566 100644
--- a/engine/src/test/java/com/datatorrent/stram/StramLocalClusterTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/StramLocalClusterTest.java
@@ -15,7 +15,6 @@
  */
 package com.datatorrent.stram;
 
-import com.datatorrent.stram.api.Checkpoint;
 import java.io.File;
 import java.io.FileReader;
 import java.io.IOException;
@@ -23,22 +22,17 @@ import java.io.LineNumberReader;
 import java.util.Arrays;
 import java.util.Map;
 
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.api.Context;
+
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.stram.StramLocalCluster.LocalStreamingContainer;
 import com.datatorrent.stram.StramLocalCluster.MockComponentFactory;
-import com.datatorrent.stram.engine.GenericTestOperator;
-import com.datatorrent.stram.engine.Node;
-import com.datatorrent.stram.engine.OperatorContext;
-import com.datatorrent.stram.engine.TestGeneratorInputOperator;
-import com.datatorrent.stram.engine.TestOutputOperator;
-import com.datatorrent.stram.engine.WindowGenerator;
+import com.datatorrent.stram.api.Checkpoint;
+import com.datatorrent.stram.engine.*;
 import com.datatorrent.stram.plan.logical.LogicalPlan;
 import com.datatorrent.stram.plan.physical.PTOperator;
 import com.datatorrent.stram.support.ManualScheduledExecutorService;
@@ -75,6 +69,7 @@ public class StramLocalClusterTest
   {
     LogicalPlan dag = new LogicalPlan();
     dag.setAttribute(LogicalPlan.APPLICATION_PATH, testMeta.dir);
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null));
 
     TestGeneratorInputOperator genNode = dag.addOperator("genNode", TestGeneratorInputOperator.class);
     genNode.setMaxTuples(2);
@@ -114,6 +109,9 @@ public class StramLocalClusterTest
   {
     LogicalPlan dag = new LogicalPlan();
     dag.setAttribute(LogicalPlan.APPLICATION_PATH, testMeta.dir);
+    AsyncFSStorageAgent agent = new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null);
+    agent.setSyncCheckpoint(true);
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, agent);
 
     TestGeneratorInputOperator node1 = dag.addOperator("o1", TestGeneratorInputOperator.class);
     // data will be added externally from test

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/StramMiniClusterTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/StramMiniClusterTest.java b/engine/src/test/java/com/datatorrent/stram/StramMiniClusterTest.java
index 4d0cd37..99478f5 100644
--- a/engine/src/test/java/com/datatorrent/stram/StramMiniClusterTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/StramMiniClusterTest.java
@@ -48,6 +48,7 @@ import com.sun.jersey.api.client.WebResource;
 import com.datatorrent.api.*;
 import com.datatorrent.api.Context.OperatorContext;
 
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.stram.client.StramClientUtils;
 import com.datatorrent.stram.client.StramClientUtils.YarnClientHelper;
 import com.datatorrent.stram.engine.GenericTestOperator;
@@ -202,6 +203,9 @@ public class StramMiniClusterTest
     LogicalPlanConfiguration tb = new LogicalPlanConfiguration(conf);
     tb.addFromProperties(dagProps, null);
     LogicalPlan dag = createDAG(tb);
+    AsyncFSStorageAgent agent = new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null);
+    agent.setSyncCheckpoint(true);
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, agent);
     Configuration yarnConf = new Configuration(yarnCluster.getConfig());
     StramClient client = new StramClient(yarnConf, dag);
     try {
@@ -357,7 +361,10 @@ public class StramMiniClusterTest
   {
 
     LogicalPlan dag = new LogicalPlan();
-    dag.setAttribute(LogicalPlan.APPLICATION_PATH, "file:" + System.getProperty("user.dir") + "/" + testMeta.dir);
+    dag.setAttribute(LogicalPlan.APPLICATION_PATH, testMeta.dir);
+    AsyncFSStorageAgent agent = new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null);
+    agent.setSyncCheckpoint(true);
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, agent);
     FailingOperator badOperator = dag.addOperator("badOperator", FailingOperator.class);
     dag.getContextAttributes(badOperator).put(OperatorContext.RECOVERY_ATTEMPTS, 1);
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/StramRecoveryTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/StramRecoveryTest.java b/engine/src/test/java/com/datatorrent/stram/StramRecoveryTest.java
index 8515734..6172d8a 100644
--- a/engine/src/test/java/com/datatorrent/stram/StramRecoveryTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/StramRecoveryTest.java
@@ -46,6 +46,7 @@ import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.StatsListener;
 import com.datatorrent.api.StorageAgent;
 
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.FSStorageAgent;
 import com.datatorrent.stram.api.Checkpoint;
 import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol;
@@ -70,8 +71,7 @@ public class StramRecoveryTest
   private static final Logger LOG = LoggerFactory.getLogger(StramRecoveryTest.class);
   @Rule public final TestMeta testMeta = new TestMeta();
 
-  @Test
-  public void testPhysicalPlanSerialization() throws Exception
+  private void testPhysicalPlanSerialization(StorageAgent agent) throws Exception
   {
     LogicalPlan dag = new LogicalPlan();
 
@@ -86,7 +86,7 @@ public class StramRecoveryTest
     dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 2);
 
     TestPlanContext ctx = new TestPlanContext();
-    dag.setAttribute(OperatorContext.STORAGE_AGENT, new FSStorageAgent(testMeta.dir, null));
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, agent);
     PhysicalPlan plan = new PhysicalPlan(dag, ctx);
 
     ByteArrayOutputStream  bos = new ByteArrayOutputStream();
@@ -121,6 +121,18 @@ public class StramRecoveryTest
 
   }
 
+  @Test
+  public void testPhysicalPlanSerializationWithSyncAgent() throws Exception
+  {
+    testPhysicalPlanSerialization(new FSStorageAgent(testMeta.dir, null));
+  }
+
+  @Test
+  public void testPhysicalPlanSerializationWithAsyncAgent() throws Exception
+  {
+    testPhysicalPlanSerialization(new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null));
+  }
+
   public static class StatsListeningOperator extends TestGeneratorInputOperator implements StatsListener
   {
     int processStatsCnt = 0;
@@ -144,14 +156,13 @@ public class StramRecoveryTest
    * Test serialization of the container manager with mock execution layer.
    * @throws Exception
    */
-  @Test
-  public void testContainerManager() throws Exception
+  private void testContainerManager(StorageAgent agent) throws Exception
   {
     FileUtils.deleteDirectory(new File(testMeta.dir)); // clean any state from previous run
 
     LogicalPlan dag = new LogicalPlan();
     dag.setAttribute(LogicalPlan.APPLICATION_PATH, testMeta.dir);
-    dag.setAttribute(OperatorContext.STORAGE_AGENT, new FSStorageAgent(testMeta.dir, null));
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, agent);
 
     StatsListeningOperator o1 = dag.addOperator("o1", StatsListeningOperator.class);
 
@@ -254,6 +265,18 @@ public class StramRecoveryTest
   }
 
   @Test
+  public void testContainerManagerWithSyncAgent() throws Exception
+  {
+    testPhysicalPlanSerialization(new FSStorageAgent(testMeta.dir, null));
+  }
+
+  @Test
+  public void testContainerManagerWithAsyncAgent() throws Exception
+  {
+    testPhysicalPlanSerialization(new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null));
+  }
+
+  @Test
   public void testWriteAheadLog() throws Exception
   {
     final MutableInt flushCount = new MutableInt();
@@ -358,19 +381,17 @@ public class StramRecoveryTest
     scm.setPhysicalOperatorProperty(o1p1.getId(), "maxTuples", "50");
   }
 
-  @Test
-  public void testRestartApp() throws Exception
+  private void testRestartApp(StorageAgent agent, String appPath1) throws Exception
   {
     FileUtils.deleteDirectory(new File(testMeta.dir)); // clean any state from previous run
     String appId1 = "app1";
     String appId2 = "app2";
-    String appPath1 = testMeta.dir + "/" + appId1;
     String appPath2 = testMeta.dir + "/" + appId2;
 
     LogicalPlan dag = new LogicalPlan();
     dag.setAttribute(LogicalPlan.APPLICATION_ID, appId1);
     dag.setAttribute(LogicalPlan.APPLICATION_PATH, appPath1);
-    dag.setAttribute(OperatorContext.STORAGE_AGENT, new FSStorageAgent(appPath1 + "/" + LogicalPlan.SUBDIR_CHECKPOINTS, null));
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, agent);
     dag.addOperator("o1", StatsListeningOperator.class);
 
     FSRecoveryHandler recoveryHandler = new FSRecoveryHandler(dag.assertAppPath(), new Configuration(false));
@@ -419,6 +440,21 @@ public class StramRecoveryTest
   }
 
   @Test
+  public void testRestartAppWithSyncAgent() throws Exception
+  {
+    String appPath1 = testMeta.dir + "/app1";
+    testRestartApp(new FSStorageAgent(appPath1 + "/" + LogicalPlan.SUBDIR_CHECKPOINTS, null), appPath1);
+  }
+
+  @Test
+  public void testRestartAppWithAsyncAgent() throws Exception
+  {
+    String appPath1 = testMeta.dir + "/app1";
+    String checkpointPath = testMeta.dir + "/localPath";
+    testRestartApp(new AsyncFSStorageAgent(checkpointPath, appPath1 + "/" + LogicalPlan.SUBDIR_CHECKPOINTS, null), appPath1);
+  }
+
+  @Test
   public void testRpcFailover() throws Exception
   {
     String appPath = testMeta.dir;

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java b/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
index c1567b8..ba15a78 100644
--- a/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/StreamingContainerManagerTest.java
@@ -32,6 +32,7 @@ import org.junit.Test;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
+import com.datatorrent.api.Context;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.api.Context.PortContext;
 import com.datatorrent.api.DAG.Locality;
@@ -41,6 +42,7 @@ import com.datatorrent.api.StatsListener;
 import com.datatorrent.api.annotation.Stateless;
 
 import com.datatorrent.common.partitioner.StatelessPartitioner;
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.FSStorageAgent;
 import com.datatorrent.stram.StreamingContainerAgent.ContainerStartRequest;
 import com.datatorrent.stram.StreamingContainerManager.ContainerResource;
@@ -56,12 +58,7 @@ import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.ContainerSt
 import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.OperatorHeartbeat;
 import com.datatorrent.stram.appdata.AppDataPushAgent;
 import com.datatorrent.stram.codec.DefaultStatefulStreamCodec;
-import com.datatorrent.stram.engine.DefaultUnifier;
-import com.datatorrent.stram.engine.GenericTestOperator;
-import com.datatorrent.stram.engine.TestAppDataQueryOperator;
-import com.datatorrent.stram.engine.TestAppDataResultOperator;
-import com.datatorrent.stram.engine.TestAppDataSourceOperator;
-import com.datatorrent.stram.engine.TestGeneratorInputOperator;
+import com.datatorrent.stram.engine.*;
 import com.datatorrent.stram.plan.TestPlanContext;
 import com.datatorrent.stram.plan.logical.LogicalPlan;
 import com.datatorrent.stram.plan.logical.LogicalPlan.OperatorMeta;
@@ -471,6 +468,37 @@ public class StreamingContainerManagerTest {
   }
 
   @Test
+  public void testAsyncCheckpointWindowIds() throws Exception
+  {
+    File path = new File(testMeta.dir);
+    FileUtils.deleteDirectory(path.getAbsoluteFile());
+    FileUtils.forceMkdir(new File(path.getAbsoluteFile(), "/localPath"));
+
+    AsyncFSStorageAgent sa = new AsyncFSStorageAgent(path.getPath() + "/localPath", path.getPath(), null);
+
+    long[] windowIds = new long[]{123L, 345L, 234L};
+    for (long windowId : windowIds) {
+      sa.save(windowId, 1, windowId);
+      sa.copyToHDFS(1, windowId);
+    }
+
+    Arrays.sort(windowIds);
+    long[] windowsIds = sa.getWindowIds(1);
+    Arrays.sort(windowsIds);
+    Assert.assertArrayEquals("Saved windowIds", windowIds, windowsIds);
+
+    for (long windowId : windowIds) {
+      sa.delete(1, windowId);
+    }
+    try {
+      sa.getWindowIds(1);
+      Assert.fail("There should not be any most recently saved windowId!");
+    } catch (IOException io) {
+      Assert.assertTrue("No State Saved", true);
+    }
+  }
+
+  @Test
   public void testProcessHeartbeat() throws Exception
   {
     FileUtils.deleteDirectory(new File(testMeta.dir)); // clean any state from previous run
@@ -712,6 +740,8 @@ public class StreamingContainerManagerTest {
   @Test
   public void testPhysicalPropertyUpdate() throws Exception{
     LogicalPlan dag = new LogicalPlan();
+    String workingDir = new File("target/testPhysicalPropertyUpdate").getAbsolutePath();
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
     TestGeneratorInputOperator o1 = dag.addOperator("o1", TestGeneratorInputOperator.class);
     GenericTestOperator o2 = dag.addOperator("o2", GenericTestOperator.class);
     dag.addStream("o1.outport", o1.outport, o2.inport1);
@@ -735,7 +765,6 @@ public class StreamingContainerManagerTest {
           Class<? extends TestAppDataSourceOperator> dsClass, Class<? extends TestAppDataResultOperator> rClass)
   {
     LogicalPlan dag = new LogicalPlan();
-
     TestGeneratorInputOperator o1 = dag.addOperator("o1", TestGeneratorInputOperator.class);
     TestAppDataQueryOperator q = dag.addOperator("q", qClass);
     TestAppDataResultOperator r = dag.addOperator("r", rClass);
@@ -755,6 +784,8 @@ public class StreamingContainerManagerTest {
 
   private void testAppDataSources(LogicalPlan dag, boolean appendQIDToTopic) throws Exception
   {
+    String workingDir = new File("target/testAppDataSources").getAbsolutePath();
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
     StramLocalCluster lc = new StramLocalCluster(dag);
     lc.runAsync();
     StreamingContainerManager dnmgr = lc.dnmgr;

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/debug/TupleRecorderTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/debug/TupleRecorderTest.java b/engine/src/test/java/com/datatorrent/stram/debug/TupleRecorderTest.java
index 5f68c6a..718bf1b 100644
--- a/engine/src/test/java/com/datatorrent/stram/debug/TupleRecorderTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/debug/TupleRecorderTest.java
@@ -32,6 +32,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 
+import com.datatorrent.api.Context;
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.stram.engine.StreamingContainer;
 import com.datatorrent.stram.StramLocalCluster;
 import com.datatorrent.stram.debug.TupleRecorder.PortInfo;
@@ -210,6 +212,7 @@ public class TupleRecorderTest
   public void testRecordingFlow() throws Exception
   {
     LogicalPlan dag = new LogicalPlan();
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testWorkDir.getAbsolutePath() + "/localPath", testWorkDir.getAbsolutePath(), null));
 
     dag.getAttributes().put(LogicalPlan.APPLICATION_PATH, "file://" + testWorkDir.getAbsolutePath());
     dag.getAttributes().put(LogicalPlan.TUPLE_RECORDING_PART_FILE_SIZE, 1024);  // 1KB per part

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/engine/AutoMetricTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/engine/AutoMetricTest.java b/engine/src/test/java/com/datatorrent/stram/engine/AutoMetricTest.java
index a95956e..752adeb 100644
--- a/engine/src/test/java/com/datatorrent/stram/engine/AutoMetricTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/engine/AutoMetricTest.java
@@ -41,6 +41,7 @@ import com.datatorrent.api.DAG.Locality;
 import com.datatorrent.api.Stats.OperatorStats;
 
 import com.datatorrent.common.partitioner.StatelessPartitioner;
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.stram.StramLocalCluster;
 import com.datatorrent.stram.engine.AutoMetricTest.TestOperator.TestStatsListener;
 import com.datatorrent.stram.plan.logical.LogicalPlan;
@@ -183,6 +184,7 @@ public class AutoMetricTest
   public void testMetricPropagation() throws Exception
   {
     LogicalPlan dag = new LogicalPlan();
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testMeta.dir + "/localPath", testMeta.dir, null));
     dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);
     dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 1);
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/engine/InputOperatorTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/engine/InputOperatorTest.java b/engine/src/test/java/com/datatorrent/stram/engine/InputOperatorTest.java
index 83bd61f..142f45f 100644
--- a/engine/src/test/java/com/datatorrent/stram/engine/InputOperatorTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/engine/InputOperatorTest.java
@@ -20,6 +20,7 @@ import com.datatorrent.stram.plan.logical.LogicalPlan;
 import com.datatorrent.stram.support.StramTestSupport;
 import com.datatorrent.stram.support.StramTestSupport.WaitCondition;
 import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.api.DAG.Locality;
 import com.datatorrent.api.DefaultInputPort;
 import com.datatorrent.api.DefaultOutputPort;
@@ -28,13 +29,13 @@ import com.datatorrent.api.Operator;
 import com.datatorrent.api.Context.OperatorContext;
 import com.datatorrent.netlet.util.CircularBuffer;
 
+import java.io.File;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.junit.Assert;
-
 import org.junit.Test;
 
 /**
@@ -124,6 +125,8 @@ public class InputOperatorTest
   public void testSomeMethod() throws Exception
   {
     LogicalPlan dag = new LogicalPlan();
+    String testWorkDir = new File("target").getAbsolutePath();
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(testWorkDir + "/localBasePath", testWorkDir, null));
     EvenOddIntegerGeneratorInputOperator generator = dag.addOperator("NumberGenerator", EvenOddIntegerGeneratorInputOperator.class);
     final CollectorModule<Number> collector = dag.addOperator("NumberCollector", new CollectorModule<Number>());
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/engine/ProcessingModeTests.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/engine/ProcessingModeTests.java b/engine/src/test/java/com/datatorrent/stram/engine/ProcessingModeTests.java
index bada257..0393394 100644
--- a/engine/src/test/java/com/datatorrent/stram/engine/ProcessingModeTests.java
+++ b/engine/src/test/java/com/datatorrent/stram/engine/ProcessingModeTests.java
@@ -15,7 +15,10 @@
  */
 package com.datatorrent.stram.engine;
 
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.BaseOperator;
+
+import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashSet;
@@ -75,6 +78,8 @@ public class ProcessingModeTests
     dag.getAttributes().put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 2);
     dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);
     dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 1);
+    String workingDir = new File("target/testLinearInputOperatorRecovery").getAbsolutePath();
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
     RecoverableInputOperator rip = dag.addOperator("LongGenerator", RecoverableInputOperator.class);
     rip.setMaximumTuples(maxTuples);
     rip.setSimulateFailure(true);
@@ -97,6 +102,8 @@ public class ProcessingModeTests
     CollectorOperator.duplicates.clear();
 
     LogicalPlan dag = new LogicalPlan();
+    String workingDir = new File("target/testLinearOperatorRecovery").getAbsolutePath();
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
     dag.getAttributes().put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 2);
     dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);
     dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 1);
@@ -121,6 +128,8 @@ public class ProcessingModeTests
     CollectorOperator.duplicates.clear();
 
     LogicalPlan dag = new LogicalPlan();
+    String workingDir = new File("target/testLinearInlineOperatorsRecovery").getAbsolutePath();
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
     dag.getAttributes().put(LogicalPlan.CHECKPOINT_WINDOW_COUNT, 2);
     dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 300);
     dag.getAttributes().put(LogicalPlan.CONTAINERS_MAX_COUNT, 1);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/engine/SliderTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/engine/SliderTest.java b/engine/src/test/java/com/datatorrent/stram/engine/SliderTest.java
index 754b150..26515d4 100644
--- a/engine/src/test/java/com/datatorrent/stram/engine/SliderTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/engine/SliderTest.java
@@ -15,6 +15,9 @@
  */
 package com.datatorrent.stram.engine;
 
+import java.io.File;
+
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.BaseOperator;
 import org.junit.Assert;
 import org.junit.Test;
@@ -133,6 +136,8 @@ public class SliderTest
   private void test(int applicationWindowCount, int slideByWindowCount) throws Exception
   {
     LogicalPlan dag = new LogicalPlan();
+    String workingDir = new File("target/sliderTest").getAbsolutePath();
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
     dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 100);
     Input input = dag.addOperator("Input", new Input());
     Sum sum = dag.addOperator("Sum", new Sum());

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/engine/StatsTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/engine/StatsTest.java b/engine/src/test/java/com/datatorrent/stram/engine/StatsTest.java
index 0019f56..0ededd4 100644
--- a/engine/src/test/java/com/datatorrent/stram/engine/StatsTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/engine/StatsTest.java
@@ -15,6 +15,7 @@
  */
 package com.datatorrent.stram.engine;
 
+import java.io.File;
 import java.io.Serializable;
 import java.util.*;
 
@@ -23,12 +24,14 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.datatorrent.common.util.BaseOperator;
+import com.datatorrent.api.Context;
 import com.datatorrent.api.DAG;
 import com.datatorrent.api.Stats.OperatorStats;
 import com.datatorrent.api.Stats.OperatorStats.PortStats;
 import com.datatorrent.api.StatsListener;
 
+import com.datatorrent.common.util.AsyncFSStorageAgent;
+import com.datatorrent.common.util.BaseOperator;
 import com.datatorrent.stram.StramLocalCluster;
 import com.datatorrent.stram.StreamingContainerManager;
 import com.datatorrent.stram.engine.StatsTest.TestCollector.TestCollectorStatsListener;
@@ -170,7 +173,8 @@ public class StatsTest
   {
     int tupleCount = 10;
     LogicalPlan dag = new LogicalPlan();
-
+    String workingDir = new File("target").getAbsolutePath();
+    dag.setAttribute(OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
     TestOperator testOper = dag.addOperator("TestOperator", TestOperator.class);
     TestInputStatsListener testInputStatsListener = new TestInputStatsListener();
     dag.setAttribute(testOper, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{testInputStatsListener}));
@@ -225,6 +229,8 @@ public class StatsTest
   private void baseTestForQueueSize(int maxTuples, TestCollectorStatsListener statsListener, DAG.Locality locality) throws Exception
   {
     LogicalPlan dag = new LogicalPlan();
+    String workingDir = new File("target/baseTestForQueueSize").getAbsolutePath();
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
     dag.getAttributes().put(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS, 200);
     TestOperator testOper = dag.addOperator("TestOperator", TestOperator.class);
     testOper.setMaxTuples(maxTuples);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/engine/WindowGeneratorTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/engine/WindowGeneratorTest.java b/engine/src/test/java/com/datatorrent/stram/engine/WindowGeneratorTest.java
index a6897e0..4f7b842 100644
--- a/engine/src/test/java/com/datatorrent/stram/engine/WindowGeneratorTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/engine/WindowGeneratorTest.java
@@ -15,6 +15,7 @@
  */
 package com.datatorrent.stram.engine;
 
+import java.io.File;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
@@ -24,12 +25,11 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.common.util.BaseOperator;
+
+import com.datatorrent.api.*;
 import com.datatorrent.api.Context.OperatorContext;
-import com.datatorrent.api.DefaultInputPort;
-import com.datatorrent.api.DefaultOutputPort;
-import com.datatorrent.api.InputOperator;
-import com.datatorrent.api.Sink;
 
 import com.datatorrent.bufferserver.packet.MessageType;
 import com.datatorrent.common.util.ScheduledThreadPoolExecutor;
@@ -305,7 +305,8 @@ public class WindowGeneratorTest
   {
     logger.info("Testing Out of Sequence Error");
     LogicalPlan dag = new LogicalPlan();
-
+    String workingDir = new File("target/testOutofSequenceError").getAbsolutePath();
+    dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
     RandomNumberGenerator rng = dag.addOperator("random", new RandomNumberGenerator());
     MyLogger ml = dag.addOperator("logger", new MyLogger());
 

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/29eb6c37/engine/src/test/java/com/datatorrent/stram/webapp/StramWebServicesTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/webapp/StramWebServicesTest.java b/engine/src/test/java/com/datatorrent/stram/webapp/StramWebServicesTest.java
index 49c7844..9b8f0b2 100644
--- a/engine/src/test/java/com/datatorrent/stram/webapp/StramWebServicesTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/webapp/StramWebServicesTest.java
@@ -56,6 +56,8 @@ import static org.junit.Assert.*;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 
+import com.datatorrent.api.Context;
+import com.datatorrent.common.util.AsyncFSStorageAgent;
 import com.datatorrent.stram.StramAppContext;
 import com.datatorrent.stram.StreamingContainerManager;
 import com.datatorrent.stram.plan.logical.LogicalPlan;
@@ -125,7 +127,9 @@ public class StramWebServicesTest extends JerseyTest
       protected void configureServlets()
       {
         LogicalPlan dag = new LogicalPlan();
-        dag.setAttribute(LogicalPlan.APPLICATION_PATH, new File("target", StramWebServicesTest.class.getName()).getAbsolutePath());
+        String workingDir = new File("target", StramWebServicesTest.class.getName()).getAbsolutePath();
+        dag.setAttribute(LogicalPlan.APPLICATION_PATH, workingDir);
+        dag.setAttribute(Context.OperatorContext.STORAGE_AGENT, new AsyncFSStorageAgent(workingDir + "/localPath", workingDir, null));
         final DummyStreamingContainerManager streamingContainerManager = new DummyStreamingContainerManager(dag);
 
         appContext = new TestAppContext();


[25/50] incubator-apex-core git commit: Rethrow exception so the compiler (and others) know about it.

Posted by ch...@apache.org.
Rethrow exception so the compiler (and others) know about it.


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/39ef1cf9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/39ef1cf9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/39ef1cf9

Branch: refs/heads/master
Commit: 39ef1cf94c01605a5cdaac85051b52f74b41184c
Parents: d813963
Author: thomas <th...@datatorrent.com>
Authored: Tue Aug 11 10:36:13 2015 -0700
Committer: thomas <th...@datatorrent.com>
Committed: Tue Aug 11 10:36:13 2015 -0700

----------------------------------------------------------------------
 .../main/java/com/datatorrent/stram/StreamingContainerManager.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/39ef1cf9/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
index 6840288..64850f5 100644
--- a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
+++ b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
@@ -362,7 +362,7 @@ public class StreamingContainerManager implements PlanContext
       this.containerFile = new FSJsonLineFile(new Path(this.vars.appPath + "/containers"), FsPermission.getDefault());
       this.containerFile.append(getAppMasterContainerInfo());
     } catch (IOException ex) {
-      DTThrowable.rethrow(ex);
+      throw DTThrowable.wrapIfChecked(ex);
     }
   }
 

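The change works because the helper now returns the wrapped exception instead of throwing it internally: prefixing the call with throw makes the abrupt completion of the catch block visible to the compiler, so no unreachable dummy return or uninitialized-variable complaint remains. A self-contained sketch of the idiom; the wrapIfChecked helper below is an illustrative stand-in, not the DTThrowable implementation:

    import java.io.FileInputStream;
    import java.io.IOException;

    public class RethrowSketch
    {
      // Illustrative helper: return unchecked exceptions as-is, wrap checked ones.
      static RuntimeException wrapIfChecked(Throwable t)
      {
        return t instanceof RuntimeException ? (RuntimeException)t : new RuntimeException(t);
      }

      static FileInputStream openOrFail(String path)
      {
        try {
          return new FileInputStream(path);
        } catch (IOException ex) {
          // 'throw' tells the compiler this branch cannot fall through to a missing return.
          throw wrapIfChecked(ex);
        }
      }
    }
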

[33/50] incubator-apex-core git commit: APEX-36: calling the tmp file '_tmp' instead of '._COPYING_', which is reserved by Hadoop

Posted by ch...@apache.org.
APEX-36: calling the tmp file '_tmp' instead of '._COPYING_', which is reserved by Hadoop


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/7560cefd
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/7560cefd
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/7560cefd

Branch: refs/heads/master
Commit: 7560cefd9cc5bd0a0463b59d83ad26ce5a11d70d
Parents: 9d08532
Author: Chandni Singh <ch...@datatorrent.com>
Authored: Mon Aug 17 11:20:57 2015 -0700
Committer: Chandni Singh <ch...@datatorrent.com>
Committed: Mon Aug 17 12:29:59 2015 -0700

----------------------------------------------------------------------
 .../src/main/java/com/datatorrent/common/util/FSStorageAgent.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/7560cefd/common/src/main/java/com/datatorrent/common/util/FSStorageAgent.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/com/datatorrent/common/util/FSStorageAgent.java b/common/src/main/java/com/datatorrent/common/util/FSStorageAgent.java
index 14275fa..feb9ae2 100644
--- a/common/src/main/java/com/datatorrent/common/util/FSStorageAgent.java
+++ b/common/src/main/java/com/datatorrent/common/util/FSStorageAgent.java
@@ -42,7 +42,7 @@ import com.datatorrent.netlet.util.DTThrowable;
  */
 public class FSStorageAgent implements StorageAgent, Serializable
 {
-  public static final String TMP_FILE = "._COPYING_";
+  public static final String TMP_FILE = "_tmp";
   protected static final String STATELESS_CHECKPOINT_WINDOW_ID = Long.toHexString(Stateless.WINDOW_ID);
   public final String path;
   protected final transient FileContext fileContext;

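'._COPYING_' is the in-flight suffix that Hadoop's own shell copy uses, so staging checkpoint data under that name risks colliding with files the framework manages; '_tmp' avoids that. The constant supports the usual stage-then-rename write, sketched below with the FileContext API that FSStorageAgent already holds (the exact naming inside FSStorageAgent may differ; paths here are placeholders):

    import java.io.IOException;
    import java.util.EnumSet;

    import org.apache.hadoop.fs.CreateFlag;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Options;
    import org.apache.hadoop.fs.Path;

    public class TmpFileWriteSketch
    {
      public static final String TMP_FILE = "_tmp";

      // Write to "<name>_tmp" first, then rename over the target so readers never
      // observe a partially written file.
      static void atomicWrite(FileContext fc, Path target, byte[] bytes) throws IOException
      {
        Path tmp = new Path(target.getParent(), target.getName() + TMP_FILE);
        try (FSDataOutputStream out = fc.create(tmp, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE), Options.CreateOpts.CreateParent.createParent())) {
          out.write(bytes);
        }
        fc.rename(tmp, target, Options.Rename.OVERWRITE);
      }
    }
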

[45/50] incubator-apex-core git commit: update the japicmp plugin

Posted by ch...@apache.org.
update the japicmp plugin


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/b2fb001d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/b2fb001d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/b2fb001d

Branch: refs/heads/master
Commit: b2fb001d1a5cb6795e9298e012e9e23fd19ead38
Parents: e54e94b
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Fri Aug 21 17:39:44 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Fri Aug 21 17:39:44 2015 -0700

----------------------------------------------------------------------
 api/pom.xml    | 2 +-
 common/pom.xml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b2fb001d/api/pom.xml
----------------------------------------------------------------------
diff --git a/api/pom.xml b/api/pom.xml
index 6bdff7f..12cdd51 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -32,7 +32,7 @@
       <plugin>
         <groupId>com.github.siom79.japicmp</groupId>
         <artifactId>japicmp-maven-plugin</artifactId>
-        <version>0.5.1</version>
+        <version>0.5.3</version>
         <configuration>
           <oldVersion>
             <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b2fb001d/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index c3166bc..1a9acfd 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -17,7 +17,7 @@
       <plugin>
         <groupId>com.github.siom79.japicmp</groupId>
         <artifactId>japicmp-maven-plugin</artifactId>
-        <version>0.5.1</version>
+        <version>0.5.3</version>
         <configuration>
           <oldVersion>
             <dependency>


[34/50] incubator-apex-core git commit: APEX-37 Added the APPLICATION_ATTEMPT_ID attribute and changed the operator and container history files so that different application attempts write to separate HDFS files

Posted by ch...@apache.org.
APEX-37 Added the APPLICATION_ATTEMPT_ID attribute and changed the operator and container history files so that different application attempts write to separate HDFS files


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/b8c0b4cc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/b8c0b4cc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/b8c0b4cc

Branch: refs/heads/master
Commit: b8c0b4cc06b1d3b4a94c9027da0cb44da83f4c28
Parents: 9d08532
Author: David Yan <da...@datatorrent.com>
Authored: Thu Aug 6 18:35:32 2015 -0700
Committer: David Yan <da...@datatorrent.com>
Committed: Tue Aug 18 13:45:14 2015 -0700

----------------------------------------------------------------------
 .../stram/StreamingAppMasterService.java        |  2 +-
 .../stram/StreamingContainerManager.java        | 66 +++++++++++---------
 .../stram/plan/logical/LogicalPlan.java         |  5 ++
 .../datatorrent/stram/util/FSJsonLineFile.java  | 24 ++-----
 .../datatorrent/stram/StramRecoveryTest.java    |  2 +-
 5 files changed, 46 insertions(+), 53 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b8c0b4cc/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java b/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
index 5246c9e..98c78de 100644
--- a/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
+++ b/engine/src/main/java/com/datatorrent/stram/StreamingAppMasterService.java
@@ -511,7 +511,7 @@ public class StreamingAppMasterService extends CompositeService
     if (dag.isDebug()) {
       dumpOutDebugInfo();
     }
-
+    dag.setAttribute(LogicalPlan.APPLICATION_ATTEMPT_ID, appAttemptID.getAttemptId());
     FSRecoveryHandler recoveryHandler = new FSRecoveryHandler(dag.assertAppPath(), conf);
     this.dnmgr = StreamingContainerManager.getInstance(recoveryHandler, dag, true);
     dag = this.dnmgr.getLogicalPlan();

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b8c0b4cc/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
index 64850f5..6e0f3f5 100644
--- a/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
+++ b/engine/src/main/java/com/datatorrent/stram/StreamingContainerManager.java
@@ -127,6 +127,8 @@ public class StreamingContainerManager implements PlanContext
   private final static Logger LOG = LoggerFactory.getLogger(StreamingContainerManager.class);
   public final static String GATEWAY_LOGIN_URL_PATH = "/ws/v2/login";
   public final static String BUILTIN_APPDATA_URL = "builtin";
+  public final static String CONTAINERS_INFO_FILENAME_FORMAT = "containers_%d.json";
+  public final static String OPERATORS_INFO_FILENAME_FORMAT = "operators_%d.json";
   public final static String APP_META_FILENAME = "meta.json";
   public final static String APP_META_KEY_ATTRIBUTES = "attributes";
   public final static String APP_META_KEY_METRICS = "metrics";
@@ -205,7 +207,7 @@ public class StreamingContainerManager implements PlanContext
   };
 
   private FSJsonLineFile containerFile;
-  private final ConcurrentMap<Integer, FSJsonLineFile> operatorFiles = Maps.newConcurrentMap();
+  private FSJsonLineFile operatorFile;
 
   private final long startTime = System.currentTimeMillis();
 
@@ -359,8 +361,11 @@ public class StreamingContainerManager implements PlanContext
       Configuration config = new YarnConfiguration();
       fileContext = uri.getScheme() == null ? FileContext.getFileContext(config) : FileContext.getFileContext(uri, config);
       saveMetaInfo();
-      this.containerFile = new FSJsonLineFile(new Path(this.vars.appPath + "/containers"), FsPermission.getDefault());
+      String fileName = String.format(CONTAINERS_INFO_FILENAME_FORMAT, plan.getLogicalPlan().getValue(LogicalPlan.APPLICATION_ATTEMPT_ID));
+      this.containerFile = new FSJsonLineFile(fileContext, new Path(this.vars.appPath, fileName), FsPermission.getDefault());
       this.containerFile.append(getAppMasterContainerInfo());
+      fileName = String.format(OPERATORS_INFO_FILENAME_FORMAT, plan.getLogicalPlan().getValue(LogicalPlan.APPLICATION_ATTEMPT_ID));
+      this.operatorFile = new FSJsonLineFile(fileContext, new Path(this.vars.appPath, fileName), FsPermission.getDefault());
     } catch (IOException ex) {
       throw DTThrowable.wrapIfChecked(ex);
     }
@@ -490,9 +495,7 @@ public class StreamingContainerManager implements PlanContext
     }
 
     IOUtils.closeQuietly(containerFile);
-    for (FSJsonLineFile operatorFile : operatorFiles.values()) {
-      IOUtils.closeQuietly(operatorFile);
-    }
+    IOUtils.closeQuietly(operatorFile);
     if(poolExecutor != null) {
       poolExecutor.shutdown();
     }
@@ -854,8 +857,7 @@ public class StreamingContainerManager implements PlanContext
   private void saveMetaInfo() throws IOException
   {
     Path file = new Path(this.vars.appPath, APP_META_FILENAME + "." + System.nanoTime());
-    try (FSDataOutputStream os = fileContext.create(file, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE), Options.CreateOpts.CreateParent.createParent())) {
-      JSONObject top = new JSONObject();
+    try (FSDataOutputStream os = fileContext.create(file, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE), Options.CreateOpts.CreateParent.createParent())) {      JSONObject top = new JSONObject();
       JSONObject attributes = new JSONObject();
       for (Map.Entry<Attribute<?>, Object> entry : this.plan.getLogicalPlan().getAttributes().entrySet()) {
         attributes.put(entry.getKey().getSimpleName(), entry.getValue());
@@ -1379,7 +1381,7 @@ public class StreamingContainerManager implements PlanContext
   {
     long currentTimeMillis = clock.getTime();
 
-    StreamingContainerAgent sca = this.containers.get(heartbeat.getContainerId());
+    final StreamingContainerAgent sca = this.containers.get(heartbeat.getContainerId());
     if (sca == null || sca.container.getState() == PTContainer.State.KILLED) {
       // could be orphaned container that was replaced and needs to terminate
       LOG.error("Unknown container {}", heartbeat.getContainerId());
@@ -1395,34 +1397,35 @@ public class StreamingContainerManager implements PlanContext
         sca.container.bufferServerAddress = InetSocketAddress.createUnresolved(heartbeat.bufferServerHost, heartbeat.bufferServerPort);
         LOG.info("Container {} buffer server: {}", sca.container.getExternalId(), sca.container.bufferServerAddress);
       }
-      long containerStartTime = System.currentTimeMillis();
+      final long containerStartTime = System.currentTimeMillis();
       sca.container.setState(PTContainer.State.ACTIVE);
       sca.container.setStartedTime(containerStartTime);
       sca.container.setFinishedTime(-1);
       sca.jvmName = heartbeat.jvmName;
-      try {
-        containerFile.append(sca.getContainerInfo());
-      }
-      catch (IOException ex) {
-        LOG.warn("Cannot write to container file");
-      }
-      for (PTOperator ptOp : sca.container.getOperators()) {
-        try {
-          FSJsonLineFile operatorFile = operatorFiles.get(ptOp.getId());
-          if (operatorFile == null) {
-            operatorFiles.putIfAbsent(ptOp.getId(), new FSJsonLineFile(new Path(this.vars.appPath + "/operators/" + ptOp.getId()), FsPermission.getDefault()));
-            operatorFile = operatorFiles.get(ptOp.getId());
+      poolExecutor.submit(new Runnable()
+      {
+        @Override
+        public void run()
+        {
+          try {
+            containerFile.append(sca.getContainerInfo());
+          } catch (IOException ex) {
+            LOG.warn("Cannot write to container file");
+          }
+          for (PTOperator ptOp : sca.container.getOperators()) {
+            try {
+              JSONObject operatorInfo = new JSONObject();
+              operatorInfo.put("name", ptOp.getName());
+              operatorInfo.put("id", ptOp.getId());
+              operatorInfo.put("container", sca.container.getExternalId());
+              operatorInfo.put("startTime", containerStartTime);
+              operatorFile.append(operatorInfo);
+            } catch (IOException | JSONException ex) {
+              LOG.warn("Cannot write to operator file: ", ex);
+            }
           }
-          JSONObject operatorInfo = new JSONObject();
-          operatorInfo.put("name", ptOp.getName());
-          operatorInfo.put("container", sca.container.getExternalId());
-          operatorInfo.put("startTime", containerStartTime);
-          operatorFile.append(operatorInfo);
-        }
-        catch (Exception ex) {
-          LOG.warn("Cannot write to operator file: ", ex);
         }
-      }
+      });
     }
 
     if (heartbeat.restartRequested) {
@@ -2823,9 +2826,10 @@ public class StreamingContainerManager implements PlanContext
         scm = new StreamingContainerManager(dag, enableEventRecording, new SystemClock());
       }
       else {
-        scm = new StreamingContainerManager(checkpointedState, enableEventRecording);
         // find better way to support final transient members
         PhysicalPlan plan = checkpointedState.physicalPlan;
+        plan.getLogicalPlan().setAttribute(LogicalPlan.APPLICATION_ATTEMPT_ID, dag.getAttributes().get(LogicalPlan.APPLICATION_ATTEMPT_ID));
+        scm = new StreamingContainerManager(checkpointedState, enableEventRecording);
         for (Field f : plan.getClass().getDeclaredFields()) {
           if (f.getType() == PlanContext.class) {
             f.setAccessible(true);

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b8c0b4cc/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
index d140d17..2d088b8 100644
--- a/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
+++ b/engine/src/main/java/com/datatorrent/stram/plan/logical/LogicalPlan.java
@@ -130,6 +130,11 @@ public class LogicalPlan implements Serializable, DAG
    */
   public static Attribute<Integer> CONTAINERS_MAX_COUNT = new Attribute<Integer>(Integer.MAX_VALUE);
 
+  /**
+   * The application attempt ID from YARN
+   */
+  public static Attribute<Integer> APPLICATION_ATTEMPT_ID = new Attribute<>(1);
+
   static {
     Attribute.AttributeMap.AttributeInitializer.initialize(LogicalPlan.class);
   }

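APPLICATION_ATTEMPT_ID defaults to 1 and is overwritten with the YARN attempt id in StreamingAppMasterService above; StreamingContainerManager then folds it into the containers_%d.json and operators_%d.json names so each attempt gets its own history files. A small sketch of that read-and-format step, using only the attribute and format strings introduced in this commit:

    import com.datatorrent.stram.plan.logical.LogicalPlan;

    public class AttemptFileNameSketch
    {
      public static final String CONTAINERS_INFO_FILENAME_FORMAT = "containers_%d.json";
      public static final String OPERATORS_INFO_FILENAME_FORMAT = "operators_%d.json";

      // e.g. attempt 2 -> "containers_2.json" and "operators_2.json"
      static String[] perAttemptFileNames(LogicalPlan dag)
      {
        int attemptId = dag.getValue(LogicalPlan.APPLICATION_ATTEMPT_ID);
        return new String[] {
          String.format(CONTAINERS_INFO_FILENAME_FORMAT, attemptId),
          String.format(OPERATORS_INFO_FILENAME_FORMAT, attemptId)
        };
      }
    }
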
http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b8c0b4cc/engine/src/main/java/com/datatorrent/stram/util/FSJsonLineFile.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/util/FSJsonLineFile.java b/engine/src/main/java/com/datatorrent/stram/util/FSJsonLineFile.java
index 7935ce4..3b5a31e 100644
--- a/engine/src/main/java/com/datatorrent/stram/util/FSJsonLineFile.java
+++ b/engine/src/main/java/com/datatorrent/stram/util/FSJsonLineFile.java
@@ -17,7 +17,8 @@ package com.datatorrent.stram.util;
 
 import java.io.Closeable;
 import java.io.IOException;
-import org.apache.hadoop.conf.Configuration;
+import java.util.EnumSet;
+
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.codehaus.jackson.map.ObjectMapper;
@@ -32,29 +33,13 @@ import org.slf4j.LoggerFactory;
  */
 public class FSJsonLineFile implements Closeable
 {
-  private final FileSystem fs;
   private final ObjectMapper objectMapper;
   private final FSDataOutputStream os;
   private static final Logger LOG = LoggerFactory.getLogger(FSJsonLineFile.class);
 
-  public FSJsonLineFile(Path path, FsPermission permission) throws IOException
+  public FSJsonLineFile(FileContext fileContext, Path path, FsPermission permission) throws IOException
   {
-    fs = FileSystem.newInstance(path.toUri(), new Configuration());
-    FSDataOutputStream myos;
-    if (fs.exists(path)) {
-      try {
-        // happens if not the first application attempt
-        myos = fs.append(path);
-      }
-      catch (IOException ex) {
-        LOG.warn("Caught exception (OK during unit test): {}", ex.getMessage());
-        myos = FileSystem.create(fs, path, permission);
-      }
-    }
-    else {
-      myos = FileSystem.create(fs, path, permission);
-    }
-    os = myos;
+    this.os = fileContext.create(path, EnumSet.of(CreateFlag.CREATE, CreateFlag.APPEND), Options.CreateOpts.perms(permission));
     this.objectMapper = (new JSONSerializationProvider()).getContext(null);
   }
 
@@ -74,7 +59,6 @@ public class FSJsonLineFile implements Closeable
   public void close() throws IOException
   {
     os.close();
-    fs.close();
   }
 
 }
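
With the constructor now taking a FileContext, a caller opens the file with a single
CREATE-or-APPEND call instead of the old exists/append/create fallback. A sketch of such a caller
(the path is a placeholder, not taken from this patch):

  import java.io.IOException;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileContext;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.fs.permission.FsPermission;

  import com.datatorrent.stram.util.FSJsonLineFile;

  public class JsonLineExample
  {
    public static void main(String[] args) throws IOException
    {
      Path path = new Path("/tmp/operators.json");  // hypothetical location
      FileContext fc = FileContext.getFileContext(path.toUri(), new Configuration());
      try (FSJsonLineFile file = new FSJsonLineFile(fc, path, FsPermission.getFileDefault())) {
        // file.append(jsonObject);  // one JSON object per line, as in the STRAM hunk above
      }
    }
  }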

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/b8c0b4cc/engine/src/test/java/com/datatorrent/stram/StramRecoveryTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/StramRecoveryTest.java b/engine/src/test/java/com/datatorrent/stram/StramRecoveryTest.java
index 6172d8a..ab2092a 100644
--- a/engine/src/test/java/com/datatorrent/stram/StramRecoveryTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/StramRecoveryTest.java
@@ -391,6 +391,7 @@ public class StramRecoveryTest
     LogicalPlan dag = new LogicalPlan();
     dag.setAttribute(LogicalPlan.APPLICATION_ID, appId1);
     dag.setAttribute(LogicalPlan.APPLICATION_PATH, appPath1);
+    dag.setAttribute(LogicalPlan.APPLICATION_ATTEMPT_ID, 1);
     dag.setAttribute(OperatorContext.STORAGE_AGENT, agent);
     dag.addOperator("o1", StatsListeningOperator.class);
 
@@ -408,7 +409,6 @@ public class StramRecoveryTest
     PTOperator o1p1 = plan.getOperators(dag.getOperatorMeta("o1")).get(0);
     long[] ids = new FSStorageAgent(appPath1 + "/" + LogicalPlan.SUBDIR_CHECKPOINTS, new Configuration()).getWindowIds(o1p1.getId());
     Assert.assertArrayEquals(new long[] {o1p1.getRecoveryCheckpoint().getWindowId()}, ids);
-
     Assert.assertNull(o1p1.getContainer().getExternalId());
     // trigger journal write
     o1p1.getContainer().setExternalId("cid1");


[29/50] incubator-apex-core git commit: APEX-43: assigning classToStringCodec to TUPLE_CLASS attr in PortContext

Posted by ch...@apache.org.
APEX-43: assigning classToStringCodec to TUPLE_CLASS attr in PortContext


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/93b8c661
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/93b8c661
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/93b8c661

Branch: refs/heads/master
Commit: 93b8c661942a13449975123904d24ab71c69de7a
Parents: d813963
Author: Chandni Singh <ch...@datatorrent.com>
Authored: Tue Aug 11 13:33:04 2015 -0700
Committer: Chandni Singh <ch...@datatorrent.com>
Committed: Tue Aug 11 14:59:56 2015 -0700

----------------------------------------------------------------------
 .../main/java/com/datatorrent/api/Context.java  |  2 +-
 .../plan/LogicalPlanConfigurationTest.java      | 32 +++++++++++++++-----
 2 files changed, 26 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/93b8c661/api/src/main/java/com/datatorrent/api/Context.java
----------------------------------------------------------------------
diff --git a/api/src/main/java/com/datatorrent/api/Context.java b/api/src/main/java/com/datatorrent/api/Context.java
index 249cecd..cd10398 100644
--- a/api/src/main/java/com/datatorrent/api/Context.java
+++ b/api/src/main/java/com/datatorrent/api/Context.java
@@ -156,7 +156,7 @@ public interface Context
      * Provides the tuple class which the port receives or emits. While this attribute is null by default,
      * whether it is needed or not is controlled through the port annotation.
      */
-    Attribute<Class<?>> TUPLE_CLASS = new Attribute<>(new Object2String<Class<?>>());
+    Attribute<Class<?>> TUPLE_CLASS = new Attribute<>(new Class2String<>());
 
     @SuppressWarnings("FieldNameHidesFieldInSuperclass")
     long serialVersionUID = AttributeMap.AttributeInitializer.initialize(PortContext.class);
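
The attribute can be populated either from configuration (as the new test below does) or directly
in populateDAG(). A sketch of the programmatic form, where SchemaTestOperator and MyPojo are
illustrative stand-ins for any operator/POJO pair:

  @Override
  public void populateDAG(DAG dag, Configuration conf)
  {
    SchemaTestOperator o2 = dag.addOperator("o2", new SchemaTestOperator());
    dag.setInputPortAttribute(o2.schemaRequiredPort, Context.PortContext.TUPLE_CLASS, MyPojo.class);
  }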

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/93b8c661/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java b/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java
index af12575..3b6bdd1 100644
--- a/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/plan/LogicalPlanConfigurationTest.java
@@ -713,7 +713,7 @@ public class LogicalPlanConfigurationTest {
     }
   }
 
-  @Test
+  @Test(expected = ValidationException.class)
   public void testTupleClassAttrValidation() throws Exception
   {
     String resourcePath = "/schemaTestTopology.json";
@@ -733,12 +733,30 @@ public class LogicalPlanConfigurationTest {
     LogicalPlanConfiguration planConf = new LogicalPlanConfiguration(conf);
     LogicalPlan dag = planConf.createFromJson(json, "testLoadFromJson");
 
-    try {
-      dag.validate();
-      Assert.fail();
-    } catch (ValidationException ve) {
-      //test pass as validation exception was thrown.
-    }
+    dag.validate();
+  }
+
+  @Test
+  public void testTestTupleClassAttrSetFromConfig()
+  {
+    Configuration conf = new Configuration(false);
+    conf.set(StreamingApplication.DT_PREFIX + "operator.o2.port.schemaRequiredPort.attr.TUPLE_CLASS",
+      "com.datatorrent.stram.plan.LogicalPlanConfigurationTest$TestSchema");
+
+    StreamingApplication streamingApplication = new StreamingApplication()
+    {
+      @Override
+      public void populateDAG(DAG dag, Configuration conf)
+      {
+        TestGeneratorInputOperator o1 = dag.addOperator("o1", new TestGeneratorInputOperator());
+        SchemaTestOperator o2 = dag.addOperator("o2", new SchemaTestOperator());
+        dag.addStream("stream", o1.outport, o2.schemaRequiredPort);
+      }
+    };
+    LogicalPlan dag = new LogicalPlan();
+    LogicalPlanConfiguration lpc = new LogicalPlanConfiguration(conf);
+    lpc.prepareDAG(dag, streamingApplication, "app");
+    dag.validate();
   }
 
   private static final Logger logger = LoggerFactory.getLogger(LogicalPlanConfigurationTest.class);


[31/50] incubator-apex-core git commit: SPOI-5338 #resolve cleanup of operator discoverer class to replace reflection with ASM Addressed review comments

Posted by ch...@apache.org.
SPOI-5338 #resolve cleanup of operator discoverer class to replace reflection with ASM
Addressed review comments


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/ba46e71f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/ba46e71f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/ba46e71f

Branch: refs/heads/master
Commit: ba46e71f5744e4e822d0f11a35e00b28ee91edb9
Parents: fe5d035
Author: ishark <is...@datatorrent.com>
Authored: Tue Jul 7 16:40:23 2015 -0700
Committer: ishark <is...@datatorrent.com>
Committed: Thu Aug 13 14:38:12 2015 -0700

----------------------------------------------------------------------
 .../java/com/datatorrent/stram/cli/DTCli.java   |  18 +-
 .../stram/webapp/OperatorDiscoverer.java        | 204 +++++++++----------
 .../stram/webapp/StramWebServices.java          |   8 +-
 .../com/datatorrent/stram/webapp/TypeGraph.java |  86 ++++++--
 .../stram/webapp/OperatorDiscoveryTest.java     |   9 +-
 5 files changed, 175 insertions(+), 150 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ba46e71f/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java b/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
index eff2404..ff6e84c 100644
--- a/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
+++ b/engine/src/main/java/com/datatorrent/stram/cli/DTCli.java
@@ -86,7 +86,6 @@ import com.datatorrent.stram.plan.logical.LogicalPlan;
 import com.datatorrent.stram.plan.logical.requests.*;
 import com.datatorrent.stram.security.StramUserLogin;
 import com.datatorrent.stram.util.JSONSerializationProvider;
-import com.datatorrent.stram.util.ObjectMapperFactory;
 import com.datatorrent.stram.util.VersionInfo;
 import com.datatorrent.stram.util.WebServicesClient;
 import com.datatorrent.stram.webapp.OperatorDiscoverer;
@@ -3002,26 +3001,22 @@ public class DTCli
       String[] jarFiles = files.split(",");
       File tmpDir = copyToLocal(jarFiles);
       try {
-        ObjectMapper defaultValueMapper = ObjectMapperFactory.getOperatorValueSerializer();
-        
         OperatorDiscoverer operatorDiscoverer = new OperatorDiscoverer(jarFiles);
         String searchTerm = commandLineInfo.args.length > 1 ? commandLineInfo.args[1] : null;
-        Set<Class<? extends Operator>> operatorClasses = operatorDiscoverer.getOperatorClasses(parentName, searchTerm);
+        Set<String> operatorClasses = operatorDiscoverer.getOperatorClasses(parentName, searchTerm);
         JSONObject json = new JSONObject();
         JSONArray arr = new JSONArray();
         JSONObject portClassHier = new JSONObject();
+        JSONObject portTypesWithSchemaClasses = new JSONObject();
 
         JSONObject failed = new JSONObject();
-        JSONObject portTypesWithSchemaClasses = new JSONObject();
 
-        for (Class<? extends Operator> clazz : operatorClasses) {
+        for (final String clazz : operatorClasses) {
           try {
             JSONObject oper = operatorDiscoverer.describeOperator(clazz);
 
             // add default value
-            Operator operIns = clazz.newInstance();
-            String s = defaultValueMapper.writeValueAsString(operIns);
-            oper.put("defaultValue", new JSONObject(s).get(clazz.getName()));
+            operatorDiscoverer.addDefaultValue(clazz, oper);
             
             // add class hierarchy info to portClassHier and fetch port types with schema classes
             operatorDiscoverer.buildAdditionalPortInfo(oper, portClassHier, portTypesWithSchemaClasses);
@@ -3036,7 +3031,7 @@ public class DTCli
             arr.put(oper);
           } catch (Exception | NoClassDefFoundError ex) {
             // ignore this class
-            final String cls = clazz.getName();
+            final String cls = clazz;
             failed.put(cls, ex.toString());
           }
         }
@@ -3053,7 +3048,6 @@ public class DTCli
         FileUtils.deleteDirectory(tmpDir);
       }
     }
-
   }
 
   private class GetJarOperatorPropertiesCommand implements Command
@@ -3070,7 +3064,7 @@ public class DTCli
       try {
         OperatorDiscoverer operatorDiscoverer = new OperatorDiscoverer(jarFiles);
         Class<? extends Operator> operatorClass = operatorDiscoverer.getOperatorClass(args[2]);
-        printJson(operatorDiscoverer.describeOperator(operatorClass));
+        printJson(operatorDiscoverer.describeOperator(operatorClass.getName()));
       } finally {
         FileUtils.deleteDirectory(tmpDir);
       }

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ba46e71f/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java b/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
index 004c100..0867b03 100644
--- a/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
+++ b/engine/src/main/java/com/datatorrent/stram/webapp/OperatorDiscoverer.java
@@ -15,10 +15,11 @@
  */
 package com.datatorrent.stram.webapp;
 
-import com.datatorrent.api.InputOperator;
 import com.datatorrent.api.Operator;
 import com.datatorrent.netlet.util.DTThrowable;
+import com.datatorrent.stram.util.ObjectMapperFactory;
 import com.datatorrent.stram.webapp.TypeDiscoverer.UI_TYPE;
+import com.datatorrent.stram.webapp.TypeGraph.TypeGraphVertex;
 import com.datatorrent.stram.webapp.asm.CompactAnnotationNode;
 import com.datatorrent.stram.webapp.asm.CompactFieldNode;
 import com.google.common.base.Predicate;
@@ -43,8 +44,11 @@ import java.util.regex.Pattern;
 
 import javax.xml.parsers.*;
 
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang.ClassUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.text.WordUtils;
+import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jettison.json.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -62,17 +66,7 @@ import org.xml.sax.helpers.DefaultHandler;
 public class OperatorDiscoverer
 {
   public static final String GENERATED_CLASSES_JAR = "_generated-classes.jar";
-
-  private static class ClassComparator implements Comparator<Class<?>> {
-
-    @Override
-    public int compare(Class<?> a, Class<?> b)
-    {
-      return a.getName().compareTo(b.getName());
-    }
-
-  }
-  private final Set<Class<? extends Operator>> operatorClasses = new TreeSet<Class<? extends Operator>>(new ClassComparator());
+  private Set<String> operatorClassNames;
   private static final Logger LOG = LoggerFactory.getLogger(OperatorDiscoverer.class);
   private final List<String> pathsToScan = new ArrayList<String>();
   private final ClassLoader classLoader;
@@ -261,26 +255,21 @@ public class OperatorDiscoverer
     classLoader = new URLClassLoader(urls, ClassLoader.getSystemClassLoader());
   }
 
-
-  @SuppressWarnings({ "unchecked" })
   private void loadOperatorClass()
   {
     buildTypeGraph();
-    String operatorRoot = Operator.class.getName();
-    Set<String> allOperatorClasses = typeGraph.getDescendants(operatorRoot);
-//    ClassLoader cLoader = new URLClassLoader();
-    for (String opClassName : allOperatorClasses) {
-      try {
-        Class<?> clazz = classLoader.loadClass(opClassName);
-//        typeGraph.get(opClassName).loadedClass = clazz;
-        if (isInstantiableOperatorClass(clazz)) {
-          LOG.debug("Adding class {} as an operator", clazz.getName());
-          operatorClasses.add((Class<? extends Operator>)clazz);
-        }
-      }
-      catch (Throwable ex) {
-        LOG.warn("Class cannot be loaded: {} (error was {})", opClassName, ex.getMessage());
-      }
+    operatorClassNames =  typeGraph.getAllDTInstantiableOperators();
+  }
+
+  @SuppressWarnings("unchecked")
+  public void addDefaultValue(String className, JSONObject oper) throws Exception
+  {
+    ObjectMapper defaultValueMapper = ObjectMapperFactory.getOperatorValueSerializer();
+    Class<? extends Operator> clazz = (Class<? extends Operator>) classLoader.loadClass(className);
+    if (clazz != null) {
+      Operator operIns = clazz.newInstance();
+      String s = defaultValueMapper.writeValueAsString(operIns);
+      oper.put("defaultValue", new JSONObject(s).get(className));
     }
   }
 
@@ -346,70 +335,51 @@ public class OperatorDiscoverer
     saxParserFactory.newSAXParser().parse(is, new JavadocSAXHandler());
   }
 
-  public static boolean isInstantiableOperatorClass(Class<?> clazz)
-  {
-    int modifiers = clazz.getModifiers();
-    if (Modifier.isAbstract(modifiers) || Modifier.isInterface(modifiers) || !Operator.class.isAssignableFrom(clazz)){
-      return false;
-    }
-    // return true if it is an InputOperator or if it is an Operator with more than one InputPort
-    //TODO Use ASM later
-    return InputOperator.class.isAssignableFrom(clazz) || Iterables.any(Arrays.asList(clazz.getFields()), new Predicate<Field>() {
-      @Override
-      public boolean apply(Field f)
-      {
-        return Operator.InputPort.class.isAssignableFrom(f.getType());
-      }
-    });
-  }
-
-  public Set<Class<? extends Operator>> getOperatorClasses(String parent, String searchTerm) throws ClassNotFoundException
+  public Set<String> getOperatorClasses(String parent, String searchTerm) throws ClassNotFoundException
   {
-    if (operatorClasses.isEmpty()) {
+    if (CollectionUtils.isEmpty(operatorClassNames)) {
       loadOperatorClass();
     }
-    Class<?> parentClass;
     if (parent == null) {
-      parentClass = Operator.class;
-    }
-    else {
-      parentClass = classLoader.loadClass(parent);
-      if (!Operator.class.isAssignableFrom(parentClass)) {
+      parent = Operator.class.getName();
+    } else {
+      if (!typeGraph.isAncestor(Operator.class.getName(), parent)) {
         throw new IllegalArgumentException("Argument must be a subclass of Operator class");
       }
     }
-    Set<Class<? extends Operator>> filteredClass = Sets.filter(operatorClasses, new Predicate<Class<? extends Operator>>() {
 
+    Set<String> filteredClass = Sets.filter(operatorClassNames, new Predicate<String>()
+    {
       @Override
-      public boolean apply(Class<? extends Operator> c)
+      public boolean apply(String className)
       {
-        OperatorClassInfo oci = classInfo.get(c.getName());
+        OperatorClassInfo oci = classInfo.get(className);
         return oci == null || !oci.tags.containsKey("@omitFromUI");
       }
     });
-    if (searchTerm == null && parentClass == Operator.class) {
-      return Collections.unmodifiableSet(filteredClass);
+
+    if (searchTerm == null && parent == Operator.class.getName()) {
+      return filteredClass;
     }
+
     if (searchTerm != null) {
       searchTerm = searchTerm.toLowerCase();
     }
-    Set<Class<? extends Operator>> result = new HashSet<Class<? extends Operator>>();
-    for (Class<? extends Operator> clazz : filteredClass) {
-      if (parentClass.isAssignableFrom(clazz)) {
+
+    Set<String> result = new HashSet<String>();
+    for (String clazz : filteredClass) {
+      if (parent == Operator.class.getName() || typeGraph.isAncestor(parent, clazz)) {
         if (searchTerm == null) {
           result.add(clazz);
-        }
-        else {
-          if (clazz.getName().toLowerCase().contains(searchTerm)) {
+        } else {
+          if (clazz.toLowerCase().contains(searchTerm)) {
             result.add(clazz);
-          }
-          else {
-            OperatorClassInfo oci = classInfo.get(clazz.getName());
+          } else {
+            OperatorClassInfo oci = classInfo.get(clazz);
             if (oci != null) {
               if (oci.comment != null && oci.comment.toLowerCase().contains(searchTerm)) {
                 result.add(clazz);
-              }
-              else {
+              } else {
                 for (Map.Entry<String, String> entry : oci.tags.entrySet()) {
                   if (entry.getValue().toLowerCase().contains(searchTerm)) {
                     result.add(clazz);
@@ -428,7 +398,7 @@ public class OperatorDiscoverer
   @SuppressWarnings("unchecked")
   public Class<? extends Operator> getOperatorClass(String className) throws ClassNotFoundException
   {
-    if (operatorClasses.isEmpty()) {
+    if (CollectionUtils.isEmpty(operatorClassNames)) {
       loadOperatorClass();
     }
 
@@ -440,23 +410,24 @@ public class OperatorDiscoverer
     return (Class<? extends Operator>)clazz;
   }
 
-  public JSONObject describeOperator(Class<? extends Operator> clazz) throws Exception
+  public JSONObject describeOperator(String clazz) throws Exception
   {
-    if (OperatorDiscoverer.isInstantiableOperatorClass(clazz)) {
+    TypeGraphVertex tgv = typeGraph.getTypeGraphVertex(clazz);
+    if (tgv.isInstantiable()) {
       JSONObject response = new JSONObject();
       JSONArray inputPorts = new JSONArray();
       JSONArray outputPorts = new JSONArray();
       // Get properties from ASM
 
-      JSONObject operatorDescriptor =  describeClassByASM(clazz.getName());
+      JSONObject operatorDescriptor =  describeClassByASM(clazz);
       JSONArray properties = operatorDescriptor.getJSONArray("properties");
 
       properties = enrichProperties(clazz, properties);
 
       JSONArray portTypeInfo = operatorDescriptor.getJSONArray("portTypeInfo");
 
-      List<CompactFieldNode> inputPortfields = typeGraph.getAllInputPorts(clazz.getName());
-      List<CompactFieldNode> outputPortfields = typeGraph.getAllOutputPorts(clazz.getName());
+      List<CompactFieldNode> inputPortfields = typeGraph.getAllInputPorts(clazz);
+      List<CompactFieldNode> outputPortfields = typeGraph.getAllOutputPorts(clazz);
 
 
       try {
@@ -486,13 +457,13 @@ public class OperatorDiscoverer
           outputPorts.put(outputPort);
         }
 
-        response.put("name", clazz.getName());
+        response.put("name", clazz);
         response.put("properties", properties);
         response.put(PORT_TYPE_INFO_KEY, portTypeInfo);
         response.put("inputPorts", inputPorts);
         response.put("outputPorts", outputPorts);
 
-        OperatorClassInfo oci = classInfo.get(clazz.getName());
+        OperatorClassInfo oci = classInfo.get(clazz);
 
         if (oci != null) {
           if (oci.comment != null) {
@@ -515,7 +486,7 @@ public class OperatorDiscoverer
           response.put("category", oci.tags.get("@category"));
           String displayName = oci.tags.get("@displayName");
           if (displayName == null) {
-            displayName = decamelizeClassName(clazz.getSimpleName());
+            displayName = decamelizeClassName(ClassUtils.getShortClassName(clazz));
           }
           response.put("displayName", displayName);
           String tags = oci.tags.get("@tags");
@@ -530,9 +501,9 @@ public class OperatorDiscoverer
           if (doclink != null) {
             response.put("doclink", doclink + "?" + getDocName(clazz));
           }
-          else if (clazz.getName().startsWith("com.datatorrent.lib.") ||
-                  clazz.getName().startsWith("com.datatorrent.contrib.")) {
-            response.put("doclink", DT_OPERATOR_DOCLINK_PREFIX + "?" + getDocName(clazz));
+          else if (clazz.startsWith("com.datatorrent.lib.") ||
+                  clazz.startsWith("com.datatorrent.contrib.")) {
+            response.put("doclink", DT_OPERATOR_DOCLINK_PREFIX  + "?" + getDocName(clazz));
           }
         }
       }
@@ -546,29 +517,18 @@ public class OperatorDiscoverer
     }
   }
 
-  private JSONObject setFieldAttributes(Class<? extends Operator> clazz,
-      CompactFieldNode field) throws JSONException {
+  private JSONObject setFieldAttributes(String clazz, CompactFieldNode field) throws JSONException
+  {
     JSONObject port = new JSONObject();
     port.put("name", field.getName());
 
-    for (Class<?> c = clazz; c != null; c = c.getSuperclass()) {
-      OperatorClassInfo oci = classInfo.get(c.getName());
-      if (oci != null) {
-        String fieldDesc = oci.fields.get(field.getName());
-        if (fieldDesc != null) {
-          port.put("description", fieldDesc);
-          break;
-        }
-      }
-    }
+    TypeGraphVertex tgv = typeGraph.getTypeGraphVertex(clazz);
+    putFieldDescription(field, port, tgv);
 
     List<CompactAnnotationNode> annotations = field.getVisibleAnnotations();
     CompactAnnotationNode firstAnnotation;
-    if (annotations != null
-        && !annotations.isEmpty()
-        && (firstAnnotation = field
-        .getVisibleAnnotations().get(0)) != null) {
-      for(Map.Entry<String, Object> entry :firstAnnotation.getAnnotations().entrySet() ) {
+    if (annotations != null && !annotations.isEmpty() && (firstAnnotation = field.getVisibleAnnotations().get(0)) != null) {
+      for (Map.Entry<String, Object> entry : firstAnnotation.getAnnotations().entrySet()) {
         port.put(entry.getKey(), entry.getValue());
       }
     }
@@ -576,7 +536,23 @@ public class OperatorDiscoverer
     return port;
   }
 
-  private JSONArray enrichProperties(Class<?> operatorClass, JSONArray properties) throws JSONException
+  private void putFieldDescription(CompactFieldNode field, JSONObject port, TypeGraphVertex tgv) throws JSONException
+  {
+    OperatorClassInfo oci = classInfo.get(tgv.typeName);
+    if (oci != null) {
+      String fieldDesc = oci.fields.get(field.getName());
+      if (fieldDesc != null) {
+        port.put("description", fieldDesc);
+        return;
+      }
+    }
+
+    for (TypeGraphVertex ancestor : tgv.getAncestors()) {
+      putFieldDescription(field, port, ancestor);
+    }
+  }
+
+  private JSONArray enrichProperties(String operatorClass, JSONArray properties) throws JSONException
   {
     JSONArray result = new JSONArray();
     for (int i = 0; i < properties.length(); i++) {
@@ -602,16 +578,26 @@ public class OperatorDiscoverer
     return result;
   }
 
-  private OperatorClassInfo getOperatorClassWithGetterSetter(Class<?> operatorClass, String setterName, String getterName) {
-    if(operatorClass != null && !Operator.class.isAssignableFrom(operatorClass)){
-      return null;
-    }
-    OperatorClassInfo oci = classInfo.get(operatorClass.getName());
-    if(oci != null && (oci.getMethods.containsKey(getterName) || oci.setMethods.containsKey(setterName))){
+  private OperatorClassInfo getOperatorClassWithGetterSetter(String operatorClass, String setterName, String getterName)
+  {
+    TypeGraphVertex tgv = typeGraph.getTypeGraphVertex(operatorClass);
+    return getOperatorClassWithGetterSetter(tgv, setterName, getterName);
+  }
+
+  private OperatorClassInfo getOperatorClassWithGetterSetter(TypeGraphVertex tgv, String setterName, String getterName)
+  {
+    OperatorClassInfo oci = classInfo.get(tgv.typeName);
+    if (oci != null && (oci.getMethods.containsKey(getterName) || oci.setMethods.containsKey(setterName))) {
       return oci;
     } else {
-      return getOperatorClassWithGetterSetter(operatorClass.getSuperclass(), setterName, getterName);
+      if (tgv.getAncestors() != null) {
+        for (TypeGraphVertex ancestor : tgv.getAncestors()) {
+          return getOperatorClassWithGetterSetter(ancestor, setterName, getterName);
+        }
+      }
     }
+
+    return null;
   }
 
   private void addTagsToProperties(MethodInfo mi, JSONObject propJ) throws JSONException
@@ -675,9 +661,9 @@ public class OperatorDiscoverer
   }
 
 
-  private static String getDocName(Class<?> clazz)
+  private static String getDocName(String clazz)
   {
-    return clazz.getName().replace('.', '/').replace('$', '.') + ".html";
+    return clazz.replace('.', '/').replace('$', '.') + ".html";
   }
 
   private JSONArray getClassProperties(Class<?> clazz, int level) throws IntrospectionException
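
After this change the discoverer resolves operators by class name through its ASM type graph and
loads a class only when a default value is requested. The calling pattern from the DTCli hunk
above, reduced to a sketch (jar path and search term are placeholders):

  void listOperators() throws Exception
  {
    OperatorDiscoverer discoverer = new OperatorDiscoverer(new String[] {"/path/to/operators.jar"});
    for (String className : discoverer.getOperatorClasses(null, "kafka")) {  // null parent = any Operator
      JSONObject oper = discoverer.describeOperator(className);              // ports, properties, doc tags
      discoverer.addDefaultValue(className, oper);                           // adds the serialized default instance
    }
  }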

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ba46e71f/engine/src/main/java/com/datatorrent/stram/webapp/StramWebServices.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/webapp/StramWebServices.java b/engine/src/main/java/com/datatorrent/stram/webapp/StramWebServices.java
index 010a976..97edf39 100644
--- a/engine/src/main/java/com/datatorrent/stram/webapp/StramWebServices.java
+++ b/engine/src/main/java/com/datatorrent/stram/webapp/StramWebServices.java
@@ -288,11 +288,11 @@ public class StramWebServices
     }
 
     try {
-      Set<Class<? extends Operator>> operatorClasses = operatorDiscoverer.getOperatorClasses(parent, searchTerm);
+      Set<String> operatorClasses = operatorDiscoverer.getOperatorClasses(parent, searchTerm);
 
-      for (Class<?> clazz : operatorClasses) {
+      for (String clazz : operatorClasses) {
         JSONObject j = new JSONObject();
-        j.put("name", clazz.getName());
+        j.put("name", clazz);
         classNames.put(j);
       }
 
@@ -318,7 +318,7 @@ public class StramWebServices
     try {
       Class<?> clazz = Class.forName(className);
       if (Operator.class.isAssignableFrom(clazz)) {
-        return operatorDiscoverer.describeOperator((Class<? extends Operator>)clazz);
+        return operatorDiscoverer.describeOperator(className);
       }
       else {
         throw new NotFoundException();

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ba46e71f/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java b/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
index 61dc99d..d0b34c2 100644
--- a/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
+++ b/engine/src/main/java/com/datatorrent/stram/webapp/TypeGraph.java
@@ -37,6 +37,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.datatorrent.api.Component;
+import com.datatorrent.api.InputOperator;
 import com.datatorrent.api.Operator;
 import com.datatorrent.netlet.util.DTThrowable;
 import com.datatorrent.stram.webapp.asm.ClassNodeType;
@@ -91,6 +92,22 @@ public class TypeGraph
     JACKSON_INSTANTIABLE_CLASSES = b.build();
   }
 
+  public boolean isAncestor(String parentClassName, String subClassName)
+  {
+    TypeGraphVertex parentVertex = typeGraph.get(parentClassName);
+    TypeGraphVertex classVertex = typeGraph.get(subClassName);
+
+    if(parentVertex == null || classVertex == null)
+      return false;
+
+    return TypeGraph.isAncestor(parentVertex, classVertex);
+  }
+
+  public TypeGraphVertex getTypeGraphVertex(String className)
+  {
+    return typeGraph.get(className);
+  }
+
   private static boolean isAncestor(TypeGraphVertex typeTgv, TypeGraphVertex tgv)
   {
     if (tgv == typeTgv) {
@@ -368,6 +385,22 @@ public class TypeGraph
     return typeGraph.size();
   }
 
+  public Set<String> getAllDTInstantiableOperators()
+  {
+    TypeGraphVertex tgv = typeGraph.get(Operator.class.getName());
+    if (tgv == null) {
+      return null;
+    }
+    Set<String> result = new TreeSet<String>();
+    for (TypeGraphVertex node : tgv.allInstantiableDescendants) {
+      if ((isAncestor(InputOperator.class.getName(), node.typeName) || !getAllInputPorts(node).isEmpty())) {
+        result.add(node.typeName);
+      }
+    }
+
+    return result;
+  }
+
   public Set<String> getDescendants(String fullClassName)
   {
     Set<String> result = new HashSet<String>();
@@ -456,6 +489,10 @@ public class TypeGraph
       this.classNode = classNode;
     }
 
+    public Set<TypeGraphVertex> getAncestors()
+    {
+      return ancestors;
+    }
     public int numberOfInstantiableDescendants()
     {
       return allInstantiableDescendants.size() + (isInstantiable() ? 1 : 0);
@@ -467,7 +504,7 @@ public class TypeGraph
       this.jarName = jarName;
     }
 
-    private boolean isInstantiable()
+    public boolean isInstantiable()
     {
       return JACKSON_INSTANTIABLE_CLASSES.contains(this.typeName) || (isPublicConcrete() && classNode.getDefaultConstructor() != null);
     }
@@ -682,9 +719,17 @@ public class TypeGraph
     return null;
   }
 
-  public List<CompactFieldNode> getAllInputPorts(String clazzName) {
+  public List<CompactFieldNode> getAllInputPorts(String clazzName)
+  {
     TypeGraphVertex tgv = typeGraph.get(clazzName);
+    return getAllInputPorts(tgv);
+  }
+
+  public List<CompactFieldNode> getAllInputPorts(TypeGraphVertex tgv)
+  {
     List<CompactFieldNode> ports = new ArrayList<CompactFieldNode>();
+    if (tgv == null)
+      return ports;
     TypeGraphVertex portVertex = typeGraph.get(Operator.InputPort.class
         .getName());
     getAllPortsWithAncestor(portVertex, tgv, ports);
@@ -694,26 +739,28 @@ public class TypeGraph
         return a.getName().compareTo(b.getName());
       }
     });
+
     return ports;
   }
 
-  public List<CompactFieldNode> getAllOutputPorts(String clazzName) {
+  public List<CompactFieldNode> getAllOutputPorts(String clazzName)
+  {
     TypeGraphVertex tgv = typeGraph.get(clazzName);
     List<CompactFieldNode> ports = new ArrayList<CompactFieldNode>();
-    TypeGraphVertex portVertex = typeGraph.get(Operator.OutputPort.class
-        .getName());
+    TypeGraphVertex portVertex = typeGraph.get(Operator.OutputPort.class.getName());
     getAllPortsWithAncestor(portVertex, tgv, ports);
-    Collections.sort(ports, new Comparator<CompactFieldNode>() {
+    Collections.sort(ports, new Comparator<CompactFieldNode>()
+    {
       @Override
-      public int compare(CompactFieldNode a, CompactFieldNode b) {
+      public int compare(CompactFieldNode a, CompactFieldNode b)
+      {
         return a.getName().compareTo(b.getName());
       }
     });
     return ports;
   }
-  
-  private void getAllPortsWithAncestor(TypeGraphVertex portVertex,
-      TypeGraphVertex tgv, List<CompactFieldNode> ports)
+
+  private void getAllPortsWithAncestor(TypeGraphVertex portVertex, TypeGraphVertex tgv, List<CompactFieldNode> ports)
   {
     List<CompactFieldNode> fields = tgv.getClassNode().getPorts();
     if (fields != null) {
@@ -730,21 +777,22 @@ public class TypeGraph
     }
   }
 
-  private void addClassPropertiesAndPorts(String clazzName, JSONObject desc) throws JSONException {
+  private void addClassPropertiesAndPorts(String clazzName, JSONObject desc) throws JSONException
+  {
     TypeGraphVertex tgv = typeGraph.get(clazzName);
     if (tgv == null) {
       return;
     }
 
     Map<String, JSONObject> results = new TreeMap<String, JSONObject>();
-    List<CompactMethodNode> getters =  new LinkedList<CompactMethodNode>();
+    List<CompactMethodNode> getters = new LinkedList<CompactMethodNode>();
     List<CompactMethodNode> setters = new LinkedList<CompactMethodNode>();
     Map<Type, Type> typeReplacement = new HashMap<Type, Type>();
-    List<CompactFieldNode> ports =  new LinkedList<CompactFieldNode>();
-    
+    List<CompactFieldNode> ports = new LinkedList<CompactFieldNode>();
+
     getPublicSetterGetterAndPorts(tgv, setters, getters, typeReplacement, ports);
     desc.put("portTypeInfo", getPortTypeInfo(clazzName, typeReplacement, ports));
-    
+
     for (CompactMethodNode setter : setters) {
       String prop = WordUtils.uncapitalize(setter.getName().substring(3));
       JSONObject propJ = results.get(prop);
@@ -756,13 +804,12 @@ public class TypeGraph
       propJ.put("canSet", true);
       propJ.put("canGet", false);
 
-
       MethodSignatureVisitor msv = null;
       msv = setter.getMethodSignatureNode();
-      if(msv==null){
+      if (msv == null) {
         continue;
       }
-      
+
       List<Type> param = msv.getParameters();
       if (CollectionUtils.isEmpty(param)) {
         propJ.put("type", "UNKNOWN");
@@ -788,10 +835,9 @@ public class TypeGraph
 
         MethodSignatureVisitor msv = null;
         msv = getter.getMethodSignatureNode();
-        if(msv==null){
+        if (msv == null) {
           continue;
         }
-        
 
         Type rt = msv.getReturnType();
         if (rt == null) {
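
The new isAncestor()/getAncestors() helpers let the checks above run against the ASM-built graph by
name, without loading any classes. A simplified standalone model of that walk (not the real
TypeGraph, which keeps compact ASM class nodes on each vertex):

  import java.util.HashMap;
  import java.util.HashSet;
  import java.util.Map;
  import java.util.Set;

  class MiniTypeGraph
  {
    static class Vertex
    {
      final String typeName;
      final Set<Vertex> ancestors = new HashSet<Vertex>();  // superclass and interface edges

      Vertex(String typeName)
      {
        this.typeName = typeName;
      }
    }

    final Map<String, Vertex> vertices = new HashMap<String, Vertex>();

    boolean isAncestor(String parentName, String subName)
    {
      Vertex parent = vertices.get(parentName);
      Vertex sub = vertices.get(subName);
      return parent != null && sub != null && walk(parent, sub);
    }

    private boolean walk(Vertex parent, Vertex sub)
    {
      if (sub == parent) {
        return true;
      }
      for (Vertex a : sub.ancestors) {  // recurse up every superclass/interface edge
        if (walk(parent, a)) {
          return true;
        }
      }
      return false;
    }
  }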

http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/ba46e71f/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java b/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
index 8baa08a..0f2fc13 100644
--- a/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
+++ b/engine/src/test/java/com/datatorrent/stram/webapp/OperatorDiscoveryTest.java
@@ -190,7 +190,8 @@ public class OperatorDiscoveryTest
     String[] classFilePath = getClassFileInClasspath();
     OperatorDiscoverer operatorDiscoverer = new OperatorDiscoverer(classFilePath);
     operatorDiscoverer.buildTypeGraph();
-    JSONObject oper = operatorDiscoverer.describeOperator(SubSubClassGeneric.class);
+    JSONObject oper = operatorDiscoverer.describeOperator(SubSubClassGeneric.class.getName());
+
     String debug = "\n(ASM)type info for " + TestOperator.class + ":\n" + oper.toString(2) + "\n";
 
     JSONArray props = oper.getJSONArray("properties");
@@ -277,8 +278,6 @@ public class OperatorDiscoveryTest
     od.buildTypeGraph();
 
     Assert.assertNotNull(od.getOperatorClass(BaseOperator.class.getName()));
-    Assert.assertFalse("Base Operator is not instantiable because it is not an InputOperator and it has no input port ",
-            OperatorDiscoverer.isInstantiableOperatorClass(BaseOperator.class));
 
     JSONObject asmDesc = od.describeClassByASM(TestOperator.class.getName());
     String debug = "\n(ASM)type info for " + TestOperator.class + ":\n" + asmDesc.toString(2) + "\n";
@@ -1057,7 +1056,7 @@ public class OperatorDiscoveryTest
     String[] classFilePath = getClassFileInClasspath();
     OperatorDiscoverer od = new OperatorDiscoverer(classFilePath);
     od.buildTypeGraph();
-    JSONObject operatorJson = od.describeOperator(SchemaRequiredOperator.class);
+    JSONObject operatorJson = od.describeOperator(SchemaRequiredOperator.class.getName());
     JSONArray portsJson = operatorJson.getJSONArray("outputPorts");
 
     Assert.assertEquals("no. of ports", 3, portsJson.length());
@@ -1081,7 +1080,7 @@ public class OperatorDiscoveryTest
     String[] classFilePath = getClassFileInClasspath();
     OperatorDiscoverer operatorDiscoverer = new OperatorDiscoverer(classFilePath);
     operatorDiscoverer.buildTypeGraph();
-    JSONObject operator = operatorDiscoverer.describeOperator(SubSubClassGeneric.class);
+    JSONObject operator = operatorDiscoverer.describeOperator(SubSubClassGeneric.class.getName());
 
     JSONObject portClassHierarchy = new JSONObject();
     JSONObject portsWithSchemaClasses = new JSONObject();


[03/50] incubator-apex-core git commit: Fixed documentation and spaces

Posted by ch...@apache.org.
Fixed documentation and spaces


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/6d0fde61
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/6d0fde61
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/6d0fde61

Branch: refs/heads/master
Commit: 6d0fde610da6ecc3617402310d044bd91e2ccc8a
Parents: 0f9fb47
Author: Chandni Singh <ch...@datatorrent.com>
Authored: Fri Jul 31 10:55:18 2015 -0700
Committer: thomas <th...@datatorrent.com>
Committed: Tue Aug 4 11:11:37 2015 -0700

----------------------------------------------------------------------
 api/src/main/java/com/datatorrent/api/AutoMetric.java | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/6d0fde61/api/src/main/java/com/datatorrent/api/AutoMetric.java
----------------------------------------------------------------------
diff --git a/api/src/main/java/com/datatorrent/api/AutoMetric.java b/api/src/main/java/com/datatorrent/api/AutoMetric.java
index 23c3be3..61e8dd0 100644
--- a/api/src/main/java/com/datatorrent/api/AutoMetric.java
+++ b/api/src/main/java/com/datatorrent/api/AutoMetric.java
@@ -39,19 +39,18 @@ public @interface AutoMetric
     /**
      * @return map of metric name to value
      */
-     Map<String, Object> getMetrics();
+    Map<String, Object> getMetrics();
 
     /**
-     *
      * @return operator id
      */
-     int operatorId();
+    int operatorId();
   }
 
   /**
    * It aggregates metrics from multiple physical partitions of an operator to a logical one.<br/>
-   * An aggregator is provided as operator attribute. By default when there isn't any aggregator set the engine does
-   * summation of a number metric.
+   * An aggregator is provided as operator attribute. By default, when there isn't any aggregator set explicitly,
+   * the application master sums up all the number metrics.
    */
   public static interface Aggregator
   {
@@ -98,7 +97,7 @@ public @interface AutoMetric
      * to aggregations. Stram will invoke this method for each logical metric and check if the aggregations are overwritten
      * and will inform that to app data tracker.
      *
-     * @param logicalMetricName  logical metric name.
+     * @param logicalMetricName logical metric name.
      * @return aggregations eg. SUM, MIN, MAX, etc. that will be performed by app data tracker on a logical metric.
      */
     String[] getDimensionAggregationsFor(String logicalMetricName);
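
For reference, the typical usage the javadoc describes: a field annotated with @AutoMetric is
collected every window from each physical partition, and with no aggregator attribute set the
application master sums the numeric values across partitions. A minimal sketch (operator and field
names are illustrative, imports assume the common BaseOperator/DefaultInputPort helpers):

  import com.datatorrent.api.AutoMetric;
  import com.datatorrent.api.DefaultInputPort;
  import com.datatorrent.common.util.BaseOperator;

  public class LineCounter extends BaseOperator
  {
    @AutoMetric
    protected long linesPerWindow;  // reported per window, summed across partitions by default

    public final transient DefaultInputPort<String> input = new DefaultInputPort<String>()
    {
      @Override
      public void process(String line)
      {
        linesPerWindow++;
      }
    };

    @Override
    public void beginWindow(long windowId)
    {
      linesPerWindow = 0;
    }
  }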


[44/50] incubator-apex-core git commit: Merge branch 'checkPoint_COPYING_overwrite' of https://github.com/ishark/Apex into ishark-checkPoint_COPYING_overwrite

Posted by ch...@apache.org.
Merge branch 'checkPoint_COPYING_overwrite' of https://github.com/ishark/Apex into ishark-checkPoint_COPYING_overwrite


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/e54e94be
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/e54e94be
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/e54e94be

Branch: refs/heads/master
Commit: e54e94bef39001693ed71156dd101277c75e2265
Parents: a2f9d2e be4af0a
Author: thomas <th...@datatorrent.com>
Authored: Fri Aug 21 16:59:59 2015 -0700
Committer: thomas <th...@datatorrent.com>
Committed: Fri Aug 21 16:59:59 2015 -0700

----------------------------------------------------------------------
 .../common/util/AsyncFSStorageAgent.java        | 50 +++++++++++++++-----
 1 file changed, 39 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/e54e94be/common/src/main/java/com/datatorrent/common/util/AsyncFSStorageAgent.java
----------------------------------------------------------------------


[36/50] incubator-apex-core git commit: corrected minor spelling error

Posted by ch...@apache.org.
corrected minor spelling error


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/3c5b88c2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/3c5b88c2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/3c5b88c2

Branch: refs/heads/master
Commit: 3c5b88c227b40824c9635dd2d7bb77c9937486d3
Parents: ccf704e
Author: David Yan <da...@datatorrent.com>
Authored: Wed Aug 19 14:45:00 2015 -0700
Committer: David Yan <da...@datatorrent.com>
Committed: Wed Aug 19 14:45:00 2015 -0700

----------------------------------------------------------------------
 engine/src/main/java/com/datatorrent/stram/StramClient.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/3c5b88c2/engine/src/main/java/com/datatorrent/stram/StramClient.java
----------------------------------------------------------------------
diff --git a/engine/src/main/java/com/datatorrent/stram/StramClient.java b/engine/src/main/java/com/datatorrent/stram/StramClient.java
index dfb4511..8a8baf3 100644
--- a/engine/src/main/java/com/datatorrent/stram/StramClient.java
+++ b/engine/src/main/java/com/datatorrent/stram/StramClient.java
@@ -349,7 +349,7 @@ public class StramClient
 
     // Dump out information about cluster capability as seen by the resource manager
     int maxMem = newApp.getNewApplicationResponse().getMaximumResourceCapability().getMemory();
-    LOG.info("Max mem capabililty of resources in this cluster " + maxMem);
+    LOG.info("Max mem capability of resources in this cluster " + maxMem);
     int amMemory = dag.getMasterMemoryMB();
     if (amMemory > maxMem) {
       LOG.info("AM memory specified above max threshold of cluster. Using max value."


[08/50] incubator-apex-core git commit: Merge branch 'master-3.1-backport' into v3.1.0

Posted by ch...@apache.org.
Merge branch 'master-3.1-backport' into v3.1.0


Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/528423a8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/528423a8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/528423a8

Branch: refs/heads/master
Commit: 528423a8330ab62115b2b7250aada3a55cd0d20f
Parents: 3c0076a 6d0fde6
Author: thomas <th...@datatorrent.com>
Authored: Tue Aug 4 21:51:12 2015 -0700
Committer: thomas <th...@datatorrent.com>
Committed: Tue Aug 4 21:51:12 2015 -0700

----------------------------------------------------------------------
 api/src/main/java/com/datatorrent/api/AutoMetric.java    | 11 +++++------
 .../com/datatorrent/stram/engine/StreamingContainer.java |  6 ++++--
 2 files changed, 9 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-apex-core/blob/528423a8/engine/src/main/java/com/datatorrent/stram/engine/StreamingContainer.java
----------------------------------------------------------------------


[18/50] incubator-apex-core git commit: Merge pull request #115 from gauravgopi123/v3.1.0

Posted by ch...@apache.org.
Merge pull request #115 from gauravgopi123/v3.1.0

Fixed tests that were creating unwanted folders

Project: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/commit/8cbecac5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/tree/8cbecac5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-apex-core/diff/8cbecac5

Branch: refs/heads/master
Commit: 8cbecac53085f2eb5bbaa43b5c4cfba0dd2c2479
Parents: d934b97 1617ca3
Author: Chetan Narsude <ch...@datatorrent.com>
Authored: Thu Aug 6 21:35:46 2015 -0700
Committer: Chetan Narsude <ch...@datatorrent.com>
Committed: Thu Aug 6 21:35:46 2015 -0700

----------------------------------------------------------------------
 .../stram/StreamingContainerManagerTest.java        |  7 +++----
 .../datatorrent/stram/engine/AtLeastOnceTest.java   | 16 ++++++++++++++++
 .../stram/engine/StreamingContainerTest.java        |  6 ++++++
 .../datatorrent/stram/stream/OiOEndWindowTest.java  |  5 +++++
 4 files changed, 30 insertions(+), 4 deletions(-)
----------------------------------------------------------------------