You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@falcon.apache.org by ra...@apache.org on 2014/12/02 19:48:58 UTC

[1/3] incubator-falcon git commit: FALCON-928 Use falcon's checkstyle for falcon regression code. Contributed by Raghav Kumar Gautam

Repository: incubator-falcon
Updated Branches:
  refs/heads/master 9f1efe096 -> 4f2b524d9


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedReplicationPartitionExpTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedReplicationPartitionExpTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedReplicationPartitionExpTest.java
index bde4e65..d7df953 100755
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedReplicationPartitionExpTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedReplicationPartitionExpTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.falcon.regression.prism;
 
+import org.apache.falcon.regression.Entities.FeedMerlin;
 import org.apache.falcon.regression.core.bundle.Bundle;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.feed.ActionType;
@@ -31,7 +32,6 @@ import org.apache.falcon.regression.core.util.InstanceUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -70,10 +70,10 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
     private String testBaseDir2 = baseTestDir + "/clusterPath/localDC/rc/billing";
     private String testBaseDir3 = baseTestDir + "/dataBillingRC/fetlrc/billing";
     private String testBaseDir4 = baseTestDir + "/sourcetarget";
-    private String testBaseDirServer1source = baseTestDir + "/source1";
+    private String testBaseDirServer1Source = baseTestDir + "/source1";
     private String testDirWithDate = testBaseDir1 + testDate;
-    private String testDirWithDateSourcetarget = testBaseDir4 + testDate;
-    private String testDirWithDateSource1 = testBaseDirServer1source + testDate;
+    private String testDirWithDateSourceTarget = testBaseDir4 + testDate;
+    private String testDirWithDateSource1 = testBaseDirServer1Source + testDate;
     private String testFile1 = OSUtil.RESOURCES
         + OSUtil.getPath("ReplicationResources", "feed-s4Replication.xml");
     private String testFile2 = OSUtil.RESOURCES + OSUtil.getPath("ReplicationResources", "id.pig");
@@ -145,17 +145,17 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
         //data for test normalTest_1s2t_pst where both source target partition are required
 
-        uploadDataToServer3(testDirWithDateSourcetarget + "00/ua3/ua2/", testFile1);
-        uploadDataToServer3(testDirWithDateSourcetarget + "05/ua3/ua2/", testFile2);
-        uploadDataToServer3(testDirWithDateSourcetarget + "10/ua3/ua2/", testFile3);
-        uploadDataToServer3(testDirWithDateSourcetarget + "15/ua3/ua2/", testFile4);
-        uploadDataToServer3(testDirWithDateSourcetarget + "20/ua3/ua2/", testFile4);
+        uploadDataToServer3(testDirWithDateSourceTarget + "00/ua3/ua2/", testFile1);
+        uploadDataToServer3(testDirWithDateSourceTarget + "05/ua3/ua2/", testFile2);
+        uploadDataToServer3(testDirWithDateSourceTarget + "10/ua3/ua2/", testFile3);
+        uploadDataToServer3(testDirWithDateSourceTarget + "15/ua3/ua2/", testFile4);
+        uploadDataToServer3(testDirWithDateSourceTarget + "20/ua3/ua2/", testFile4);
 
-        uploadDataToServer3(testDirWithDateSourcetarget + "00/ua3/ua1/", testFile1);
-        uploadDataToServer3(testDirWithDateSourcetarget + "05/ua3/ua1/", testFile2);
-        uploadDataToServer3(testDirWithDateSourcetarget + "10/ua3/ua1/", testFile3);
-        uploadDataToServer3(testDirWithDateSourcetarget + "15/ua3/ua1/", testFile4);
-        uploadDataToServer3(testDirWithDateSourcetarget + "20/ua3/ua1/", testFile4);
+        uploadDataToServer3(testDirWithDateSourceTarget + "00/ua3/ua1/", testFile1);
+        uploadDataToServer3(testDirWithDateSourceTarget + "05/ua3/ua1/", testFile2);
+        uploadDataToServer3(testDirWithDateSourceTarget + "10/ua3/ua1/", testFile3);
+        uploadDataToServer3(testDirWithDateSourceTarget + "15/ua3/ua1/", testFile4);
+        uploadDataToServer3(testDirWithDateSourceTarget + "20/ua3/ua1/", testFile4);
 
         // data when server 1 acts as source
         uploadDataToServer1(testDirWithDateSource1 + "00/ua2/", testFile1);
@@ -208,27 +208,33 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
 
         String feed = bundles[0].getDataSets().get(0);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA1, "2012-10-01T12:10Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE, "",
-                testBaseDir1 + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA2, "2012-10-01T12:25Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, "",
-                testBaseDir2 + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE, "");
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA1, "2012-10-01T12:10Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("")
+                .withDataLocation(testBaseDir1 + MINUTE_DATE_PATTERN)
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA2, "2012-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withPartition("")
+                .withDataLocation(testBaseDir2 + MINUTE_DATE_PATTERN)
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("")
+                .build()).toString();
 
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 
@@ -240,7 +246,7 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
 
     @Test(enabled = true)
-    public void normalTest1s1t1nPS() throws Exception {
+    public void normalTest1Source1Target1NeutralPartitionedSource() throws Exception {
         //this test is for ideal condition when data is present in all the required places and
         // replication takes
         // place normally
@@ -256,27 +262,33 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
 
         String feed = bundles[0].getDataSets().get(0);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(100000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA1, "2099-10-01T12:10Z"),
-                XmlUtil.createRetention("days(100000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), null, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA2, "2099-10-01T12:25Z"),
-                XmlUtil.createRetention("days(100000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
-                testBaseDir2 + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z"),
-                XmlUtil.createRetention("days(100000)", ActionType.DELETE),
-                Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-                "${cluster.colo}", testBaseDir1 + MINUTE_DATE_PATTERN);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(100000)", ActionType.DELETE)
+                .withValidity(startTimeUA1, "2099-10-01T12:10Z")
+                .build())
+            .toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(100000)", ActionType.DELETE)
+                .withValidity(startTimeUA2, "2099-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(testBaseDir2 + MINUTE_DATE_PATTERN)
+                .build())
+            .toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("days(100000)", ActionType.DELETE)
+                .withValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("${cluster.colo}")
+                .withDataLocation(testBaseDir1 + MINUTE_DATE_PATTERN)
+                .build())
+            .toString();
 
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 
@@ -339,7 +351,7 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
 
     @Test(enabled = true)
-    public void normalTest1s1t1nPT() throws Exception {
+    public void normalTest1Source1Target1NeutralPartitionedTarget() throws Exception {
         //this test is for ideal condition when data is present in all the required places and
         // replication takes
         // place normally
@@ -351,27 +363,31 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
 
         String feed = bundles[0].getDataSets().get(0);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA1, "2099-10-01T12:10Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), null, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA2, "2099-10-01T12:25Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET,
-                "${cluster.colo}", testBaseDir2 + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE, null,
-            testBaseDir1 + MINUTE_DATE_PATTERN);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA1, "2099-10-01T12:10Z")
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA2, "2099-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withPartition("${cluster.colo}")
+                .withDataLocation(testBaseDir2 + MINUTE_DATE_PATTERN)
+                .build())
+            .toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withDataLocation(testBaseDir1 + MINUTE_DATE_PATTERN)
+                .build()).toString();
 
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 
@@ -423,7 +439,7 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
 
     @Test(enabled = true)
-    public void normalTest1s2tPT() throws Exception {
+    public void normalTest1Source2TargetPartitionedTarget() throws Exception {
         //this test is for ideal condition when data is present in all the required places and
         // replication takes
         // place normally
@@ -442,27 +458,30 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
         String feed = bundles[0].getDataSets().get(0);
         feed = InstanceUtil.setFeedFilePath(feed, testBaseDir3 + MINUTE_DATE_PATTERN);
 
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
 
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA1, "2012-10-01T12:10Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET,
-                "${cluster.colo}");
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA1, "2012-10-01T12:10Z")
+                .withClusterType(ClusterType.TARGET)
+                .withPartition("${cluster.colo}")
+                .build()).toString();
 
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA2, "2012-10-01T12:25Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET,
-                "${cluster.colo}");
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA2, "2012-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withPartition("${cluster.colo}")
+                .build()).toString();
 
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
 
 
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
@@ -521,7 +540,7 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
     }
 
     @Test(enabled = true, groups = "embedded")
-    public void normalTest2s1tPT() throws Exception {
+    public void normalTest2Source1TargetPartitionedTarget() throws Exception {
         //this test is for ideal condition when data is present in all the required places and
         // replication takes
         // place normally
@@ -538,28 +557,32 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
 
         String feed = bundles[0].getDataSets().get(0);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA1, "2012-10-01T12:10Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE, null,
-                testBaseDir1 + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA2, "2012-10-01T12:25Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET,
-                "${cluster.colo}",
-                testBaseDir2 + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA1, "2012-10-01T12:10Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withDataLocation(testBaseDir1 + MINUTE_DATE_PATTERN)
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA2, "2012-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withPartition("${cluster.colo}")
+                .withDataLocation(testBaseDir2 + MINUTE_DATE_PATTERN)
+                .build())
+            .toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
 
         //clean target if old data exists
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
@@ -575,7 +598,7 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
 
     @Test(enabled = true)
-    public void normalTest1s2tPS() throws Exception {
+    public void normalTest1Source2TargetPartitionedSource() throws Exception {
 
         //this test is for ideal condition when data is present in all the required places and
         // replication takes
@@ -595,28 +618,33 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
         String feed = bundles[0].getDataSets().get(0);
         feed = InstanceUtil.setFeedFilePath(feed,
             testBaseDir1 + MINUTE_DATE_PATTERN);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(10000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA1, "2012-10-01T12:11Z"),
-                XmlUtil.createRetention("days(10000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET, null,
-                testBaseDir1 + "/ua1" + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA2, "2012-10-01T12:26Z"),
-                XmlUtil.createRetention("days(10000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
-                testBaseDir1 + "/ua2" + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z"),
-            XmlUtil.createRetention("days(10000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-            "${cluster.colo}");
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(10000000)", ActionType.DELETE)
+                .withValidity(startTimeUA1, "2012-10-01T12:11Z")
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(testBaseDir1 + "/ua1" + MINUTE_DATE_PATTERN)
+                .build())
+            .toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(10000000)", ActionType.DELETE)
+                .withValidity(startTimeUA2, "2012-10-01T12:26Z")
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(testBaseDir1 + "/ua2" + MINUTE_DATE_PATTERN)
+                .build())
+            .toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("days(10000000)", ActionType.DELETE)
+                .withValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("${cluster.colo}")
+                .build()).toString();
 
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 
@@ -678,7 +706,7 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
 
     @Test(enabled = true)
-    public void normalTest2s1tPS() throws Exception {
+    public void normalTest2Source1TargetPartitionedSource() throws Exception {
         //this test is for ideal condition when data is present in all the required places and
         // replication takes
         // place normally
@@ -695,29 +723,34 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
         String startTimeUA2 = "2012-10-01T12:00Z";
 
         String feed = bundles[0].getDataSets().get(0);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA1, "2099-10-01T12:10Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE,
-                "${cluster.colo}",
-                testBaseDirServer1source + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA2, "2099-10-01T12:25Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
-                testBaseDir2 + "/replicated" + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-            "${cluster.colo}", testBaseDir1 + MINUTE_DATE_PATTERN);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA1, "2099-10-01T12:10Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("${cluster.colo}")
+                .withDataLocation(testBaseDirServer1Source + MINUTE_DATE_PATTERN)
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA2, "2099-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(testBaseDir2 + "/replicated" + MINUTE_DATE_PATTERN)
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("${cluster.colo}")
+                .withDataLocation(testBaseDir1 + MINUTE_DATE_PATTERN)
+                .build())
+            .toString();
 
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 
@@ -750,7 +783,7 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
         List<Path> ua1OriginalData00 = HadoopUtil
             .getAllFilesRecursivelyHDFS(cluster1FS, new Path(
-                testBaseDirServer1source + testDate + "00/ua1"));
+                testBaseDirServer1Source + testDate + "00/ua1"));
         List<Path> ua3OriginalData05 = HadoopUtil
             .getAllFilesRecursivelyHDFS(cluster3FS, new Path(testDirWithDate + "05/ua1"));
 
@@ -760,7 +793,7 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
 
     @Test(enabled = true)
-    public void normalTest1s2tPST() throws Exception {
+    public void normalTest1Source2TargetPartitionedSourceTarget() throws Exception {
 
 
         //this test is for ideal condition when data is present in all the required places and
@@ -779,28 +812,35 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
         String feed = bundles[0].getDataSets().get(0);
         feed = InstanceUtil.setFeedFilePath(feed, testBaseDir1 + MINUTE_DATE_PATTERN);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA1, "2099-10-01T12:10Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET,
-                "${cluster.colo}", testBaseDir1 + "/ua1" + MINUTE_DATE_PATTERN + "/");
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA2, "2099-10-01T12:25Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET,
-                "${cluster.colo}", testBaseDir1 + "/ua2" + MINUTE_DATE_PATTERN + "/");
-
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z")
-            , XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-            "${cluster.colo}", testBaseDir4 + MINUTE_DATE_PATTERN + "/");
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA1, "2099-10-01T12:10Z")
+                .withClusterType(ClusterType.TARGET)
+                .withPartition("${cluster.colo}")
+                .withDataLocation(testBaseDir1 + "/ua1" + MINUTE_DATE_PATTERN + "/")
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA2, "2099-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withPartition("${cluster.colo}")
+                .withDataLocation(testBaseDir1 + "/ua2" + MINUTE_DATE_PATTERN + "/")
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("${cluster.colo}")
+                .withDataLocation(testBaseDir4 + MINUTE_DATE_PATTERN + "/")
+                .build())
+            .toString();
 
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 
@@ -846,16 +886,16 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
 
         List<Path> ua3OriginalData05ua1 = HadoopUtil
             .getAllFilesRecursivelyHDFS(cluster3FS, new Path(
-                testDirWithDateSourcetarget + "05/ua3/ua1"));
+                testDirWithDateSourceTarget + "05/ua3/ua1"));
         List<Path> ua3OriginalData10ua1 = HadoopUtil
             .getAllFilesRecursivelyHDFS(cluster3FS, new Path(
-                testDirWithDateSourcetarget + "10/ua3/ua1"));
+                testDirWithDateSourceTarget + "10/ua3/ua1"));
         List<Path> ua3OriginalData10ua2 = HadoopUtil
             .getAllFilesRecursivelyHDFS(cluster3FS, new Path(
-                testDirWithDateSourcetarget + "10/ua3/ua2"));
+                testDirWithDateSourceTarget + "10/ua3/ua2"));
         List<Path> ua3OriginalData15ua2 = HadoopUtil
             .getAllFilesRecursivelyHDFS(cluster3FS, new Path(
-                testDirWithDateSourcetarget + "15/ua3/ua2"));
+                testDirWithDateSourceTarget + "15/ua3/ua2"));
 
         AssertUtil.checkForListSizes(ua1ReplicatedData05, ua3OriginalData05ua1);
         AssertUtil.checkForListSizes(ua1ReplicatedData10, ua3OriginalData10ua1);
@@ -872,27 +912,33 @@ public class PrismFeedReplicationPartitionExpTest extends BaseTestClass {
         String startTimeUA2 = "2012-10-01T12:10Z";
 
         String feed = bundles[0].getDataSets().get(0);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA1, "2012-10-01T12:10Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE, "",
-                testBaseDir1 + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA2, "2012-10-01T12:25Z"),
-                XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-                Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.TARGET, "",
-                testBaseDir2 + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE, "");
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA1, "2012-10-01T12:10Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("")
+                .withDataLocation(testBaseDir1 + MINUTE_DATE_PATTERN)
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTimeUA2, "2012-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withPartition("")
+                .withDataLocation(testBaseDir2 + MINUTE_DATE_PATTERN)
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity("2012-10-01T12:00Z", "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("")
+                .build()).toString();
 
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedReplicationUpdateTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedReplicationUpdateTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedReplicationUpdateTest.java
index 76345e0..0bd4c31 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedReplicationUpdateTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedReplicationUpdateTest.java
@@ -18,6 +18,8 @@
 
 package org.apache.falcon.regression.prism;
 
+import org.apache.falcon.regression.Entities.FeedMerlin;
+import org.apache.falcon.regression.Entities.ProcessMerlin;
 import org.apache.falcon.regression.core.bundle.Bundle;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.feed.ActionType;
@@ -30,7 +32,6 @@ import org.apache.falcon.regression.core.util.InstanceUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.log4j.Logger;
@@ -104,10 +105,7 @@ public class PrismFeedReplicationUpdateTest extends BaseTestClass {
         Bundle.submitCluster(bundles[0], bundles[1], bundles[2]);
 
         String feed = bundles[0].getDataSets().get(0);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2009-02-01T00:00Z", "2012-01-01T00:00Z"),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
 
         // use the colo string here so that the test works in embedded and distributed mode.
         String postFix = "/US/" + cluster2Colo;
@@ -123,24 +121,31 @@ public class PrismFeedReplicationUpdateTest extends BaseTestClass {
 
         String startTime = TimeUtil.getTimeWrtSystemTime(-30);
 
-        feed = InstanceUtil.setFeedCluster(feed, XmlUtil.createValidity(startTime,
-            TimeUtil.addMinsToTime(startTime, 85)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-            "US/${cluster.colo}");
-
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 20),
-                TimeUtil.addMinsToTime(startTime, 105)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET, null);
-
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 40),
-                TimeUtil.addMinsToTime(startTime, 130)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-            "UK/${cluster.colo}");
+        feed = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 85))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("US/${cluster.colo}")
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 20),
+                    TimeUtil.addMinsToTime(startTime, 105))
+                .withClusterType(ClusterType.TARGET)
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 40),
+                    TimeUtil.addMinsToTime(startTime, 130))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("UK/${cluster.colo}")
+                .build()).toString();
 
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 
@@ -198,20 +203,9 @@ public class PrismFeedReplicationUpdateTest extends BaseTestClass {
         String outputFeed = bundles[0].getOutputFeedFromBundle();
 
         //set clusters to null;
-        feed01 = InstanceUtil.setFeedCluster(feed01,
-            XmlUtil.createValidity("2009-02-01T00:00Z", "2012-01-01T00:00Z"),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed02 = InstanceUtil.setFeedCluster(feed02,
-            XmlUtil.createValidity("2009-02-01T00:00Z", "2012-01-01T00:00Z"),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        outputFeed = InstanceUtil.setFeedCluster(outputFeed,
-            XmlUtil.createValidity("2009-02-01T00:00Z", "2012-01-01T00:00Z"),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
+        feed01 = FeedMerlin.fromString(feed01).clearFeedClusters().toString();
+        feed02 = FeedMerlin.fromString(feed02).clearFeedClusters().toString();
+        outputFeed = FeedMerlin.fromString(outputFeed).clearFeedClusters().toString();
 
         //set new feed input data
         feed01 = Util.setFeedPathValue(feed01, baseHDFSDir + "/feed01" + MINUTE_DATE_PATTERN);
@@ -229,41 +223,49 @@ public class PrismFeedReplicationUpdateTest extends BaseTestClass {
         String startTime = TimeUtil.getTimeWrtSystemTime(-50);
 
         //set clusters for feed01
-        feed01 = InstanceUtil
-            .setFeedCluster(feed01, XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE,
-                null);
-
-        feed01 = InstanceUtil
-            .setFeedCluster(feed01, XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.TARGET,
-                null);
+        feed01 = FeedMerlin.fromString(feed01).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
+
+        feed01 = FeedMerlin.fromString(feed01).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.TARGET)
+                .build()).toString();
 
         //set clusters for feed02
-        feed02 = InstanceUtil
-            .setFeedCluster(feed02, XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET,
-                null);
-
-        feed02 = InstanceUtil
-            .setFeedCluster(feed02, XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-                null);
+        feed02 = FeedMerlin.fromString(feed02).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.TARGET)
+                .build()).toString();
+
+        feed02 = FeedMerlin.fromString(feed02).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
 
         //set clusters for output feed
-        outputFeed = InstanceUtil.setFeedCluster(outputFeed,
-            XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE, null);
-
-        outputFeed = InstanceUtil.setFeedCluster(outputFeed,
-            XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.TARGET, null);
+        outputFeed = FeedMerlin.fromString(outputFeed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
+
+        outputFeed = FeedMerlin.fromString(outputFeed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.TARGET)
+                .build()).toString();
 
         //submit and schedule feeds
         prism.getFeedHelper().submitAndSchedule(feed01);
@@ -279,16 +281,21 @@ public class PrismFeedReplicationUpdateTest extends BaseTestClass {
         String processStartTime = TimeUtil.getTimeWrtSystemTime(-6);
         String processEndTime = TimeUtil.getTimeWrtSystemTime(70);
 
-        process = InstanceUtil.setProcessCluster(process, null,
-            XmlUtil.createProcessValidity(startTime, "2099-01-01T00:00Z"));
-
-        process = InstanceUtil
-            .setProcessCluster(process, Util.readEntityName(bundles[0].getClusters().get(0)),
-                XmlUtil.createProcessValidity(processStartTime, processEndTime));
-
-        process = InstanceUtil
-            .setProcessCluster(process, Util.readEntityName(bundles[2].getClusters().get(0)),
-                XmlUtil.createProcessValidity(processStartTime, processEndTime));
+        process = ProcessMerlin.fromString(process).clearProcessCluster().toString();
+
+        process = ProcessMerlin.fromString(process).addProcessCluster(
+            new ProcessMerlin.ProcessClusterBuilder(
+                Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withValidity(processStartTime, processEndTime)
+                .build()
+        ).toString();
+
+        process = ProcessMerlin.fromString(process).addProcessCluster(
+            new ProcessMerlin.ProcessClusterBuilder(
+                Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withValidity(processStartTime, processEndTime)
+                .build()
+        ).toString();
         process = InstanceUtil.addProcessInputFeed(process, Util.readEntityName(feed02),
             Util.readEntityName(feed02));
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedSnSTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedSnSTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedSnSTest.java
index 1cf44b7..3c48fc4 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedSnSTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedSnSTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.falcon.regression.prism;
 
+import org.apache.falcon.regression.Entities.FeedMerlin;
 import org.apache.falcon.regression.core.bundle.Bundle;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.feed.ActionType;
@@ -26,11 +27,9 @@ import org.apache.falcon.regression.core.helpers.ColoHelper;
 import org.apache.falcon.regression.core.response.ServiceResponse;
 import org.apache.falcon.regression.core.util.AssertUtil;
 import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.core.util.InstanceUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.OozieUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.log4j.Logger;
 import org.apache.oozie.client.Job;
@@ -384,21 +383,25 @@ public class PrismFeedSnSTest extends BaseTestClass {
         String startTimeUA2 = "2012-10-01T12:00Z";
 
         String feed = bundles[0].getDataSets().get(0);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(10000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA1, "2099-10-01T12:10Z"),
-                XmlUtil.createRetention("days(10000)", ActionType.DELETE),
-                Util.readEntityName(clust1), ClusterType.SOURCE, "${cluster.colo}",
-                baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN);
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeUA2, "2099-10-01T12:25Z"),
-                XmlUtil.createRetention("days(10000)", ActionType.DELETE),
-                Util.readEntityName(clust2), ClusterType.TARGET, null, baseHDFSDir
-                + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(clust1))
+                .withRetention("days(10000)", ActionType.DELETE)
+                .withValidity(startTimeUA1, "2099-10-01T12:10Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("${cluster.colo}")
+                .withDataLocation(baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+                .build())
+            .toString();
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(clust2))
+                .withRetention("days(10000)", ActionType.DELETE)
+                .withValidity(startTimeUA2, "2099-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(
+                    baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+                .build()).toString();
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 
         Util.shutDownService(cluster1.getFeedHelper());

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedUpdateTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedUpdateTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedUpdateTest.java
index 902ec23..9f31e84 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedUpdateTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedUpdateTest.java
@@ -33,7 +33,6 @@ import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.OozieUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.log4j.Logger;
@@ -123,16 +122,8 @@ public class PrismFeedUpdateTest extends BaseTestClass {
 
         /* set source and target for the 2 feeds */
         //set clusters to null;
-        feed01 = InstanceUtil
-            .setFeedCluster(feed01,
-                XmlUtil.createValidity("2009-02-01T00:00Z", "2012-01-01T00:00Z"),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE), null,
-                ClusterType.SOURCE, null);
-        outputFeed = InstanceUtil
-            .setFeedCluster(outputFeed,
-                XmlUtil.createValidity("2009-02-01T00:00Z", "2012-01-01T00:00Z"),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE), null,
-                ClusterType.SOURCE, null);
+        feed01 = FeedMerlin.fromString(feed01).clearFeedClusters().toString();
+        outputFeed = FeedMerlin.fromString(outputFeed).clearFeedClusters().toString();
 
         //set new feed input data
         feed01 = Util.setFeedPathValue(feed01, baseTestDir + "/feed01" + MINUTE_DATE_PATTERN);
@@ -145,24 +136,32 @@ public class PrismFeedUpdateTest extends BaseTestClass {
         HadoopUtil.lateDataReplenish(server1FS, 80, 20, prefix, null);
 
         //set clusters for feed01
-        feed01 = InstanceUtil
-            .setFeedCluster(feed01, XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(cluster1Def), ClusterType.SOURCE, null);
-        feed01 = InstanceUtil
-            .setFeedCluster(feed01, XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(cluster2Def), ClusterType.TARGET, null);
+        feed01 = FeedMerlin.fromString(feed01).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(cluster1Def))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
+        feed01 = FeedMerlin.fromString(feed01).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(cluster2Def))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.TARGET)
+                .build()).toString();
 
         //set clusters for output feed
-        outputFeed = InstanceUtil.setFeedCluster(outputFeed,
-            XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(cluster1Def), ClusterType.SOURCE, null);
-        outputFeed = InstanceUtil.setFeedCluster(outputFeed,
-            XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(cluster2Def), ClusterType.TARGET, null);
+        outputFeed = FeedMerlin.fromString(outputFeed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(cluster1Def))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
+        outputFeed = FeedMerlin.fromString(outputFeed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(cluster2Def))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.TARGET)
+                .build()).toString();
 
         //submit and schedule feeds
         LOGGER.info("feed01: " + Util.prettyPrintXml(feed01));
@@ -177,15 +176,17 @@ public class PrismFeedUpdateTest extends BaseTestClass {
         //add clusters to process
         String processStartTime = TimeUtil.getTimeWrtSystemTime(-11);
         String processEndTime = TimeUtil.getTimeWrtSystemTime(70);
-        process01 = InstanceUtil
-            .setProcessCluster(process01, null,
-                XmlUtil.createProcessValidity(startTime, "2099-01-01T00:00Z"));
-        process01 = InstanceUtil
-            .setProcessCluster(process01, Util.readEntityName(cluster1Def),
-                XmlUtil.createProcessValidity(processStartTime, processEndTime));
-        process01 = InstanceUtil
-            .setProcessCluster(process01, Util.readEntityName(cluster2Def),
-                XmlUtil.createProcessValidity(processStartTime, processEndTime));
+        process01 = ProcessMerlin.fromString(process01).clearProcessCluster().toString();
+        process01 = ProcessMerlin.fromString(process01).addProcessCluster(
+            new ProcessMerlin.ProcessClusterBuilder(Util.readEntityName(cluster1Def))
+                .withValidity(processStartTime, processEndTime)
+                .build()
+        ).toString();
+        process01 = ProcessMerlin.fromString(process01).addProcessCluster(
+            new ProcessMerlin.ProcessClusterBuilder(Util.readEntityName(cluster2Def))
+                .withValidity(processStartTime, processEndTime)
+                .build()
+        ).toString();
 
         //get 2nd process
         String process02 = process01;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismSubmitTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismSubmitTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismSubmitTest.java
index 279dc56..f176da7 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismSubmitTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismSubmitTest.java
@@ -80,7 +80,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test(groups = "distributed")
-    public void submitCluster1prism1coloPrismDown() throws Exception {
+    public void submitCluster1Prism1ColoPrismDown() throws Exception {
         restartRequired = true;
         Util.shutDownService(prism.getClusterHelper());
 
@@ -98,7 +98,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test(groups = "distributed")
-    public void submitClusterResubmitDiffContent() throws Exception {
+    public void submitClusterReSubmitDiffContent() throws Exception {
         ServiceResponse r = prism.getClusterHelper().submitEntity(bundles[0].getClusters().get(0));
         Assert.assertTrue(r.getMessage().contains("SUCCEEDED"));
         List<String> beforeSubmitCluster = cluster1.getClusterHelper().getStoreInfo();
@@ -117,7 +117,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test(groups = "distributed")
-    public void submitClusterResubmitAlreadyPARTIALWithAllUp() throws Exception {
+    public void submitClusterResubmitAlreadyPartialWithAllUp() throws Exception {
         restartRequired = true;
         Util.shutDownService(cluster1.getClusterHelper());
         TimeUtil.sleepSeconds(30);
@@ -231,7 +231,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test(groups = "distributed")
-    public void submitCluster1prism1coloColoDown() throws Exception {
+    public void submitCluster1Prism1ColoColoDown() throws Exception {
         restartRequired = true;
         Util.shutDownService(cluster1.getClusterHelper());
 
@@ -271,7 +271,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test(groups = "distributed")
-    public void submitCluster1prism1coloSubmitDeleted() throws Exception {
+    public void submitCluster1Prism1ColoSubmitDeleted() throws Exception {
         prism.getClusterHelper().submitEntity(bundles[0].getClusters().get(0));
         prism.getClusterHelper().delete(bundles[0].getClusters().get(0));
 
@@ -296,7 +296,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test(groups = "embedded")
-    public void submitProcessWOClusterSubmit() throws Exception {
+    public void submitProcessWoClusterSubmit() throws Exception {
         ServiceResponse r = prism.getProcessHelper().submitEntity(bundles[0].getProcessData());
 
         Assert.assertTrue(r.getMessage().contains("FAILED"));
@@ -304,7 +304,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test(groups = "embedded")
-    public void submitProcessWOFeedSubmit() throws Exception {
+    public void submitProcessWoFeedSubmit() throws Exception {
         ServiceResponse r = prism.getClusterHelper().submitEntity(bundles[0].getClusters().get(0));
         Assert.assertTrue(r.getMessage().contains("SUCCEEDED"));
 
@@ -314,7 +314,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test(groups = {"prism", "0.2", "distributed"})
-    public void submitClusterResubmitAlreadyPARTIAL() throws Exception {
+    public void submitClusterReSubmitAlreadyPartial() throws Exception {
         restartRequired = true;
         bundles[1] = new Bundle(bundles[0], cluster2);
         bundles[1].generateUniqueBundle();
@@ -388,7 +388,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test(groups = "distributed")
-    public void submitClusterResubmitDiffContentPARTIAL() throws Exception {
+    public void submitClusterResubmitDiffContentPartial() throws Exception {
         restartRequired = true;
         Util.shutDownService(cluster1.getClusterHelper());
         TimeUtil.sleepSeconds(30);
@@ -412,7 +412,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test
-    public void submitClusterPARTIALDeletedOfPARTIALSubmit() throws Exception {
+    public void submitClusterPartialDeletedOfPartialSubmit() throws Exception {
         restartRequired = true;
         Util.shutDownService(cluster1.getClusterHelper());
         TimeUtil.sleepSeconds(30);
@@ -470,7 +470,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test(groups = "embedded")
-    public void submitClusterResubmitAlreadySucceeded() throws Exception {
+    public void submitClusterReSubmitAlreadySucceeded() throws Exception {
         ServiceResponse r = prism.getClusterHelper().submitEntity(bundles[0].getClusters().get(0));
         Assert.assertTrue(r.getMessage().contains("SUCCEEDED"));
 
@@ -486,7 +486,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test(groups = "distributed")
-    public void submitCluster1prism1coloAllUp() throws Exception {
+    public void submitCluster1Prism1ColoAllUp() throws Exception {
         List<String> beforeSubmitCluster1 = cluster1.getClusterHelper().getStoreInfo();
         List<String> beforeSubmitCluster2 = cluster2.getClusterHelper().getStoreInfo();
         List<String> beforeSubmitPrism = prism.getClusterHelper().getStoreInfo();
@@ -507,7 +507,7 @@ public class PrismSubmitTest extends BaseTestClass {
     }
 
     @Test(groups = "embedded")
-    public void submitCluster1prism1coloAlreadySubmitted() throws Exception {
+    public void submitCluster1Prism1ColoAlreadySubmitted() throws Exception {
         prism.getClusterHelper().submitEntity(bundles[0].getClusters().get(0));
 
         List<String> beforeSubmitCluster1 = cluster1.getClusterHelper().getStoreInfo();

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/RescheduleKilledProcessTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/RescheduleKilledProcessTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/RescheduleKilledProcessTest.java
index 4a0c10e..32c9783 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/RescheduleKilledProcessTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/RescheduleKilledProcessTest.java
@@ -27,7 +27,6 @@ import org.apache.falcon.regression.core.util.InstanceUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.log4j.Logger;
 import org.testng.annotations.AfterClass;
@@ -88,11 +87,13 @@ public class RescheduleKilledProcessTest extends BaseTestClass {
         processMerlin.setProcessFeeds(feed, 0, 0, 1);
         process = processMerlin.toString();
 
-        process = InstanceUtil.setProcessCluster(process, null,
-            XmlUtil.createProcessValidity(processStartTime, "2099-01-01T00:00Z"));
-        process = InstanceUtil
-            .setProcessCluster(process, Util.readEntityName(bundles[0].getClusters().get(0)),
-                XmlUtil.createProcessValidity(processStartTime, processEndTime));
+        process = ProcessMerlin.fromString(process).clearProcessCluster().toString();
+        process = ProcessMerlin.fromString(process).addProcessCluster(
+            new ProcessMerlin.ProcessClusterBuilder(
+                Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withValidity(processStartTime, processEndTime)
+                .build()
+        ).toString();
         bundles[0].setProcessData(process);
 
         bundles[0].submitFeedsScheduleProcess(prism);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ui/LineageGraphTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ui/LineageGraphTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ui/LineageGraphTest.java
index 51fcb8d..cd32ea4 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ui/LineageGraphTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ui/LineageGraphTest.java
@@ -210,8 +210,8 @@ public class LineageGraphTest extends BaseUITestClass {
                         Assert.assertEquals(info.get("Runs on"), clusterName,
                             String.format(message, "cluster", processName));
                     }
-                    Assert.assertEquals(info.get("Owned by"), System.getProperty("user"
-                        + ".name"), "Entity should be owned by current system user.");
+                    Assert.assertEquals(info.get("Owned by"), System.getProperty("user.name"),
+                        "Entity should be owned by current system user.");
                 }
             }
             processPage.refresh();
@@ -300,7 +300,7 @@ public class LineageGraphTest extends BaseUITestClass {
             List<Edge> incEdges = lineageHelper.getEdgesByDirection(piVertex.getId(),
                 Direction.inComingEdges).getResults();
             List<Edge> outcEdges = lineageHelper.getEdgesByDirection(piVertex.getId(),
-                Direction.outGoingEdges).filterByType(Edge.LEBEL_TYPE.OUTPUT);
+                Direction.outGoingEdges).filterByType(Edge.LabelType.OUTPUT);
             assert expectedEdgesAPI.addAll(incEdges);
             assert expectedEdgesAPI.addAll(outcEdges);
             /** Check the number of edges and their location*/
@@ -328,9 +328,9 @@ public class LineageGraphTest extends BaseUITestClass {
                         break;
                     }
                 }
-                Assert.assertTrue(
-                    isEdgePresent, String.format("Edge %s-->%s isn't present on lineage or "
-                        + "painted incorrectly.", startVertexAPI.getName(), endVertexAPI.getName()));
+                Assert.assertTrue(isEdgePresent,
+                    String.format("Edge %s-->%s isn't present on lineage or painted incorrectly.",
+                        startVertexAPI.getName(), endVertexAPI.getName()));
             }
             processPage.refresh();
         }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/pom.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/pom.xml b/falcon-regression/pom.xml
index c204d14..be17513 100644
--- a/falcon-regression/pom.xml
+++ b/falcon-regression/pom.xml
@@ -34,13 +34,12 @@
     <packaging>pom</packaging>
 
     <modules>
-        <module>checkstyle</module>
         <module>merlin-core</module>
         <module>merlin</module>
     </modules>
 
     <properties>
-        <oozie.version>3.2.2</oozie.version>
+        <oozie.version>4.0.0.2.1.7.0-784</oozie.version>
         <hive.version>0.13.1</hive.version>
     </properties>
 
@@ -359,6 +358,7 @@
                         <exclude>*.txt</exclude>
                         <exclude>.git/**</exclude>
                         <exclude>.idea/**</exclude>
+                        <exclude>oozie_logs/**</exclude>
                         <exclude>**/*.twiki</exclude>
                         <exclude>**/*.iml</exclude>
                         <exclude>**/target/**</exclude>
@@ -427,7 +427,7 @@
                     <!--debug>true</debug -->
                     <xmlOutput>true</xmlOutput>
                     <excludeFilterFile>
-                        ${project.basedir}/../checkstyle/src/main/resources/falcon/findbugs-exclude.xml
+                        ${project.basedir}/../../checkstyle/src/main/resources/falcon/findbugs-exclude.xml
                     </excludeFilterFile>
                     <failOnError>true</failOnError>
                     <skip>${skipCheck}</skip>


[3/3] incubator-falcon git commit: FALCON-928 Use falcon's checkstyle for falcon regression code. Contributed by Raghav Kumar Gautam

Posted by ra...@apache.org.
FALCON-928 Use falcon's checkstyle for falcon regression code. Contributed by Raghav Kumar Gautam


Project: http://git-wip-us.apache.org/repos/asf/incubator-falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-falcon/commit/4f2b524d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-falcon/tree/4f2b524d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-falcon/diff/4f2b524d

Branch: refs/heads/master
Commit: 4f2b524d9bde075968e33b0be611d3e7152ee1b6
Parents: 9f1efe0
Author: Raghav Kumar Gautam <ra...@apache.org>
Authored: Tue Dec 2 10:48:25 2014 -0800
Committer: Raghav Kumar Gautam <ra...@apache.org>
Committed: Tue Dec 2 10:48:25 2014 -0800

----------------------------------------------------------------------
 falcon-regression/CHANGES.txt                   |   2 +
 falcon-regression/checkstyle/pom.xml            |  28 --
 .../resources/falcon/checkstyle-java-header.txt |  17 -
 .../resources/falcon/checkstyle-noframes.xsl    | 221 ---------
 .../src/main/resources/falcon/checkstyle.xml    | 235 ---------
 .../main/resources/falcon/findbugs-exclude.xml  |  34 --
 .../core/interfaces/IEntityManagerHelper.java   |   5 +-
 .../regression/core/response/lineage/Edge.java  |  14 +-
 .../core/response/lineage/EdgesResult.java      |   4 +-
 .../core/response/lineage/Vertex.java           |   4 +-
 .../core/supportClasses/HadoopFileEditor.java   |  25 +-
 .../regression/core/util/GraphAssert.java       |   2 +-
 .../regression/core/util/InstanceUtil.java      | 126 +----
 .../falcon/regression/core/util/OozieUtil.java  |   8 +-
 .../falcon/regression/core/util/XmlUtil.java    |  30 --
 .../falcon/regression/ExternalFSTest.java       |  30 +-
 .../regression/FeedClusterUpdateTest.java       | 468 ++++++++++--------
 .../regression/FeedInstanceStatusTest.java      | 127 +++--
 .../falcon/regression/FeedReplicationTest.java  |  97 ++--
 .../falcon/regression/InstanceSummaryTest.java  |  46 +-
 .../falcon/regression/TestngListener.java       |   7 +-
 .../regression/hcat/HCatFeedOperationsTest.java |  25 +-
 .../regression/hcat/HCatReplicationTest.java    |  38 +-
 .../regression/lineage/EntitySummaryTest.java   |  27 +-
 .../regression/lineage/LineageApiTest.java      |  22 +-
 .../lineage/ListFeedInstancesTest.java          |  28 +-
 .../prism/FeedDelayParallelTimeoutTest.java     |  11 +-
 .../regression/prism/PrismFeedDeleteTest.java   | 111 +++--
 .../PrismFeedReplicationPartitionExpTest.java   | 480 ++++++++++---------
 .../prism/PrismFeedReplicationUpdateTest.java   | 163 ++++---
 .../regression/prism/PrismFeedSnSTest.java      |  37 +-
 .../regression/prism/PrismFeedUpdateTest.java   |  73 +--
 .../regression/prism/PrismSubmitTest.java       |  26 +-
 .../prism/RescheduleKilledProcessTest.java      |  13 +-
 .../falcon/regression/ui/LineageGraphTest.java  |  12 +-
 falcon-regression/pom.xml                       |   6 +-
 36 files changed, 1040 insertions(+), 1562 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/CHANGES.txt
----------------------------------------------------------------------
diff --git a/falcon-regression/CHANGES.txt b/falcon-regression/CHANGES.txt
index 51fca02..ffcb6b5 100644
--- a/falcon-regression/CHANGES.txt
+++ b/falcon-regression/CHANGES.txt
@@ -33,6 +33,8 @@ Trunk (Unreleased)
    via Samarth Gupta)
 
   IMPROVEMENTS
+   FALCON-928 Use falcon's checkstyle for falcon regression code (Raghav Kumar Gautam)
+
    FALCON-909 Remove names of the contributors from xmls and code (Ruslan Ostafiychuk)
 
    FALCON-926 Fix problems found by findbugs in merlin and merlin-core (Ruslan Ostafiychuk and

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/checkstyle/pom.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/checkstyle/pom.xml b/falcon-regression/checkstyle/pom.xml
deleted file mode 100644
index 89f785f..0000000
--- a/falcon-regression/checkstyle/pom.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <groupId>org.apache.falcon</groupId>
-    <artifactId>checkstyle</artifactId>
-    <version>0.5.1-incubating-SNAPSHOT</version>
-    <name>Checkstyle</name>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/checkstyle/src/main/resources/falcon/checkstyle-java-header.txt
----------------------------------------------------------------------
diff --git a/falcon-regression/checkstyle/src/main/resources/falcon/checkstyle-java-header.txt b/falcon-regression/checkstyle/src/main/resources/falcon/checkstyle-java-header.txt
deleted file mode 100644
index 5d5f1e3..0000000
--- a/falcon-regression/checkstyle/src/main/resources/falcon/checkstyle-java-header.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/checkstyle/src/main/resources/falcon/checkstyle-noframes.xsl
----------------------------------------------------------------------
diff --git a/falcon-regression/checkstyle/src/main/resources/falcon/checkstyle-noframes.xsl b/falcon-regression/checkstyle/src/main/resources/falcon/checkstyle-noframes.xsl
deleted file mode 100644
index 4d10dd7..0000000
--- a/falcon-regression/checkstyle/src/main/resources/falcon/checkstyle-noframes.xsl
+++ /dev/null
@@ -1,221 +0,0 @@
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-  -->
-
-<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
-
-    <xsl:output method="html" indent="yes"/>
-    <xsl:decimal-format decimal-separator="." grouping-separator=","/>
-
-    <xsl:key name="files" match="file" use="@name"/>
-
-    <!-- Checkstyle XML Style Sheet by Stephane Bailliez <sb...@apache.org>         -->
-    <!-- Part of the Checkstyle distribution found at http://checkstyle.sourceforge.net -->
-    <!-- Usage (generates checkstyle_report.html):                                      -->
-    <!--    <checkstyle failonviolation="false" config="${check.config}">               -->
-    <!--      <fileset dir="${src.dir}" includes="**/*.java"/>                          -->
-    <!--      <formatter type="xml" toFile="${doc.dir}/checkstyle_report.xml"/>         -->
-    <!--    </checkstyle>                                                               -->
-    <!--    <style basedir="${doc.dir}" destdir="${doc.dir}"                            -->
-    <!--            includes="checkstyle_report.xml"                                    -->
-    <!--            style="${doc.dir}/checkstyle-noframes-sorted.xsl"/>                 -->
-
-    <xsl:template match="checkstyle">
-        <html>
-            <head>
-                <style type="text/css">
-                    .bannercell {
-                    border: 0px;
-                    padding: 0px;
-                    }
-                    body {
-                    margin-left: 10;
-                    margin-right: 10;
-                    font:normal 80% arial,helvetica,sanserif;
-                    background-color:#FFFFFF;
-                    color:#000000;
-                    }
-                    .a td {
-                    background: #efefef;
-                    }
-                    .b td {
-                    background: #fff;
-                    }
-                    th, td {
-                    text-align: left;
-                    vertical-align: top;
-                    }
-                    th {
-                    font-weight:bold;
-                    background: #ccc;
-                    color: black;
-                    }
-                    table, th, td {
-                    font-size:100%;
-                    border: none
-                    }
-                    table.log tr td, tr th {
-
-                    }
-                    h2 {
-                    font-weight:bold;
-                    font-size:140%;
-                    margin-bottom: 5;
-                    }
-                    h3 {
-                    font-size:100%;
-                    font-weight:bold;
-                    background: #525D76;
-                    color: white;
-                    text-decoration: none;
-                    padding: 5px;
-                    margin-right: 2px;
-                    margin-left: 2px;
-                    margin-bottom: 0;
-                    }
-                </style>
-            </head>
-            <body>
-                <a name="top"></a>
-                <!-- jakarta logo -->
-                <table border="0" cellpadding="0" cellspacing="0" width="100%">
-                    <tr>
-                        <td class="bannercell" rowspan="2">
-                            <!--a href="http://jakarta.apache.org/">
-                            <img src="http://jakarta.apache.org/images/jakarta-logo.gif" alt="http://jakarta.apache.org" align="left" border="0"/>
-                            </a-->
-                        </td>
-                        <td class="text-align:right">
-                            <h2>CheckStyle Audit</h2>
-                        </td>
-                    </tr>
-                    <tr>
-                        <td class="text-align:right">Designed for use with
-                            <a href='http://checkstyle.sourceforge.net/'>CheckStyle</a>
-                            and<a href='http://jakarta.apache.org'>Ant</a>.
-                        </td>
-                    </tr>
-                </table>
-                <hr size="1"/>
-
-                <!-- Summary part -->
-                <xsl:apply-templates select="." mode="summary"/>
-                <hr size="1" width="100%" align="left"/>
-
-                <!-- Package List part -->
-                <xsl:apply-templates select="." mode="filelist"/>
-                <hr size="1" width="100%" align="left"/>
-
-                <!-- For each package create its part -->
-                <xsl:apply-templates
-                        select="file[@name and generate-id(.) = generate-id(key('files', @name))]"/>
-
-                <hr size="1" width="100%" align="left"/>
-
-
-            </body>
-        </html>
-    </xsl:template>
-
-
-    <xsl:template match="checkstyle" mode="filelist">
-        <h3>Files</h3>
-        <table class="log" border="0" cellpadding="5" cellspacing="2" width="100%">
-            <tr>
-                <th>Name</th>
-                <th>Errors</th>
-            </tr>
-            <xsl:for-each
-                    select="file[@name and generate-id(.) = generate-id(key('files', @name))]">
-                <xsl:sort data-type="number" order="descending"
-                          select="count(key('files', @name)/error)"/>
-                <xsl:variable name="errorCount" select="count(error)"/>
-                <tr>
-                    <xsl:call-template name="alternated-row"/>
-                    <td>
-                        <a href="#f-{@name}">
-                            <xsl:value-of select="@name"/>
-                        </a>
-                    </td>
-                    <td>
-                        <xsl:value-of select="$errorCount"/>
-                    </td>
-                </tr>
-            </xsl:for-each>
-        </table>
-    </xsl:template>
-
-
-    <xsl:template match="file">
-        <a name="f-{@name}"></a>
-        <h3>File
-            <xsl:value-of select="@name"/>
-        </h3>
-
-        <table class="log" border="0" cellpadding="5" cellspacing="2" width="100%">
-            <tr>
-                <th>Error Description</th>
-                <th>Line</th>
-            </tr>
-            <xsl:for-each select="key('files', @name)/error">
-                <xsl:sort data-type="number" order="ascending" select="@line"/>
-                <tr>
-                    <xsl:call-template name="alternated-row"/>
-                    <td>
-                        <xsl:value-of select="@message"/>
-                    </td>
-                    <td>
-                        <xsl:value-of select="@line"/>
-                    </td>
-                </tr>
-            </xsl:for-each>
-        </table>
-        <a href="#top">Back to top</a>
-    </xsl:template>
-
-
-    <xsl:template match="checkstyle" mode="summary">
-        <h3>Summary</h3>
-        <xsl:variable name="fileCount"
-                      select="count(file[@name and generate-id(.) = generate-id(key('files', @name))])"/>
-        <xsl:variable name="errorCount" select="count(file/error)"/>
-        <table class="log" border="0" cellpadding="5" cellspacing="2" width="100%">
-            <tr>
-                <th>Files</th>
-                <th>Errors</th>
-            </tr>
-            <tr>
-                <xsl:call-template name="alternated-row"/>
-                <td>
-                    <xsl:value-of select="$fileCount"/>
-                </td>
-                <td>
-                    <xsl:value-of select="$errorCount"/>
-                </td>
-            </tr>
-        </table>
-    </xsl:template>
-
-    <xsl:template name="alternated-row">
-        <xsl:attribute name="class">
-            <xsl:if test="position() mod 2 = 1">a</xsl:if>
-            <xsl:if test="position() mod 2 = 0">b</xsl:if>
-        </xsl:attribute>
-    </xsl:template>
-</xsl:stylesheet>
-
-

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/checkstyle/src/main/resources/falcon/checkstyle.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/checkstyle/src/main/resources/falcon/checkstyle.xml b/falcon-regression/checkstyle/src/main/resources/falcon/checkstyle.xml
deleted file mode 100644
index 37e01b4..0000000
--- a/falcon-regression/checkstyle/src/main/resources/falcon/checkstyle.xml
+++ /dev/null
@@ -1,235 +0,0 @@
-<?xml version="1.0"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-  -->
-
-<!DOCTYPE module PUBLIC
-        "-//Puppy Crawl//DTD Check Configuration 1.2//EN"
-        "http://www.puppycrawl.com/dtds/configuration_1_2.dtd">
-
-<!--
-
-  Checkstyle configuration for Falcon that is based on the sun_checks.xml file
-  that is bundled with Checkstyle and includes checks for:
-
-    - the Java Language Specification at
-      http://java.sun.com/docs/books/jls/second_edition/html/index.html
-
-    - the Sun Code Conventions at http://java.sun.com/docs/codeconv/
-
-    - the Javadoc guidelines at
-      http://java.sun.com/j2se/javadoc/writingdoccomments/index.html
-
-    - the JDK Api documentation http://java.sun.com/j2se/docs/api/index.html
-
-    - some best practices
-
-  Checkstyle is very configurable. Be sure to read the documentation at
-  http://checkstyle.sf.net (or in your downloaded distribution).
-
-  Most Checks are configurable, be sure to consult the documentation.
-
-  To completely disable a check, just comment it out or delete it from the file.
-
-  Finally, it is worth reading the documentation.
-
--->
-
-<module name="Checker">
-
-    <!-- Checks that a package.html file exists for each package.     -->
-    <!-- See http://checkstyle.sf.net/config_javadoc.html#PackageHtml -->
-    <!-- module name="PackageHtml"/ -->
-
-    <!-- Checks whether files end with a new line.                        -->
-    <!-- See http://checkstyle.sf.net/config_misc.html#NewlineAtEndOfFile -->
-    <module name="NewlineAtEndOfFile"/>
-
-    <!-- Checks for Headers                                -->
-    <!-- See http://checkstyle.sf.net/config_header.html   -->
-    <module name="Header">
-        <property name="headerFile" value="${checkstyle.header.file}"/>
-    </module>
-
-    <module name="FileLength"/>
-    <module name="FileTabCharacter"/>
-
-    <module name="TreeWalker">
-        <!-- Checks for Javadoc comments.                     -->
-        <!-- See http://checkstyle.sf.net/config_javadoc.html -->
-        <module name="JavadocType">
-            <property name="scope" value="public"/>
-            <property name="allowMissingParamTags" value="true"/>
-        </module>
-        <module name="JavadocStyle"/>
-
-        <module name="SuperClone"/>
-        <module name="SuperFinalize"/>
-
-        <!-- Checks for Naming Conventions.                  -->
-        <!-- See http://checkstyle.sf.net/config_naming.html -->
-        <module name="ConstantName"/>
-        <module name="ClassTypeParameterName">
-            <property name="format" value="^[A-Z]+$"/>
-        </module>
-        <module name="LocalFinalVariableName">
-            <!--<property name="format" value="^[A-Z][_A-Z0-9]*$"/>-->
-        </module>
-        <module name="LocalVariableName"/>
-        <module name="MemberName"/>
-        <module name="MethodName"/>
-        <module name="MethodTypeParameterName">
-            <property name="format" value="^[A-Z]+$"/>
-        </module>
-        <module name="PackageName"/>
-        <module name="ParameterName"/>
-        <module name="StaticVariableName"/>
-        <module name="TypeName"/>
-
-        <!-- Checks for imports                              -->
-        <!-- See http://checkstyle.sf.net/config_import.html -->
-        <module name="IllegalImport"/>
-        <!-- defaults to sun.* packages -->
-        <module name="RedundantImport"/>
-        <module name="UnusedImports"/>
-
-
-        <!-- Checks for Size Violations.                    -->
-        <!-- See http://checkstyle.sf.net/config_sizes.html -->
-        <module name="LineLength">
-            <property name="max" value="120"/>
-        </module>
-        <module name="MethodLength"/>
-        <module name="ParameterNumber"/>
-        <module name="OuterTypeNumber"/>
-
-        <!-- Checks for whitespace                               -->
-        <!-- See http://checkstyle.sf.net/config_whitespace.html -->
-        <module name="GenericWhitespace"/>
-        <module name="EmptyForIteratorPad"/>
-        <module name="MethodParamPad"/>
-        <module name="WhitespaceAround">
-            <property name="tokens" value="LITERAL_IF"/>
-        </module>
-        <module name="NoWhitespaceAfter">
-            <property name="tokens"
-                      value="BNOT, DEC, DOT, INC, LNOT, UNARY_MINUS, UNARY_PLUS"/>
-        </module>
-        <module name="NoWhitespaceBefore"/>
-        <module name="OperatorWrap"/>
-        <module name="ParenPad"/>
-        <module name="TypecastParenPad"/>
-        <module name="WhitespaceAfter">
-            <property name="tokens" value="COMMA, SEMI"/>
-        </module>
-
-        <module name="Regexp">
-            <property name="format" value="[ \t]+$"/>
-            <property name="illegalPattern" value="true"/>
-            <property name="message" value="Trailing whitespace"/>
-        </module>
-
-        <!-- Modifier Checks                                    -->
-        <!-- See http://checkstyle.sf.net/config_modifiers.html -->
-        <module name="ModifierOrder"/>
-        <module name="RedundantModifier"/>
-
-
-        <!-- Checks for blocks. You know, those {}'s         -->
-        <!-- See http://checkstyle.sf.net/config_blocks.html -->
-        <module name="AvoidNestedBlocks"/>
-        <module name="EmptyBlock">
-            <!-- catch blocks need a statement or a comment. -->
-            <property name="option" value="text"/>
-            <property name="tokens" value="LITERAL_CATCH"/>
-        </module>
-        <module name="EmptyBlock">
-            <!-- all other blocks need a real statement. -->
-            <property name="option" value="stmt"/>
-            <property name="tokens" value="LITERAL_DO, LITERAL_ELSE, LITERAL_FINALLY,
-          LITERAL_IF, LITERAL_FOR, LITERAL_TRY, LITERAL_WHILE, INSTANCE_INIT,
-          STATIC_INIT"/>
-        </module>
-        <module name="LeftCurly"/>
-        <module name="NeedBraces"/>
-        <module name="RightCurly"/>
-
-
-        <!-- Checks for common coding problems               -->
-        <!-- See http://checkstyle.sf.net/config_coding.html -->
-        <!-- module name="AvoidInlineConditionals"/-->
-        <module name="EmptyStatement"/>
-        <module name="EqualsHashCode"/>
-        <module name="StringLiteralEquality"/>
-        <module name="HiddenField">
-            <property name="ignoreConstructorParameter" value="true"/>
-            <property name="ignoreAbstractMethods" value="true"/>
-            <property name="ignoreSetter" value="true"/>
-        </module>
-        <module name="IllegalInstantiation"/>
-        <module name="InnerAssignment"/>
-        <module name="MissingSwitchDefault"/>
-        <module name="RedundantThrows"/>
-        <module name="SimplifyBooleanExpression"/>
-        <module name="SimplifyBooleanReturn"/>
-        <module name="DefaultComesLast"/>
-
-        <!-- Checks for class design                         -->
-        <!-- See http://checkstyle.sf.net/config_design.html -->
-        <module name="FinalClass"/>
-        <module name="HideUtilityClassConstructor"/>
-        <module name="InterfaceIsType"/>
-        <module name="VisibilityModifier">
-            <property name="protectedAllowed" value="true"/>
-        </module>
-        <module name="MissingOverride"/>
-
-
-        <!-- Miscellaneous other checks.                   -->
-        <!-- See http://checkstyle.sf.net/config_misc.html -->
-        <module name="ArrayTypeStyle"/>
-        <module name="ArrayTrailingComma"/>
-        <!--
-          This generates too many false-positives on wrapped 'throws' clauses
-          to be really useful. Disabled for now.
-
-          Falcon style is:
-          * Spaces, not tabs.
-          * Indent by four spaces.
-          * Indent by four spaces when wrapping a line.
-        -->
-        <module name="Indentation">
-            <property name="basicOffset" value="4"/>
-            <property name="caseIndent" value="0"/>
-        </module>
-        <module name="TodoComment"/>
-        <module name="UpperEll"/>
-
-        <module name="FileContentsHolder"/>
-    </module>
-
-    <!-- allow warnings to be suppressed -->
-    <module name="SuppressionCommentFilter">
-        <property name="offCommentFormat"
-                  value="SUSPEND CHECKSTYLE CHECK ParameterNumberCheck|VisibilityModifierCheck|HiddenFieldCheck|MethodName"/>
-        <property name="onCommentFormat"
-                  value="RESUME CHECKSTYLE CHECK ParameterNumberCheck|VisibilityModifierCheck|HiddenFieldCheck|MethodName"/>
-        <property name="checkFormat"
-                  value="ParameterNumberCheck|VisibilityModifierCheck|HiddenFieldCheck|MethodName"/>
-    </module>
-
-</module>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/checkstyle/src/main/resources/falcon/findbugs-exclude.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/checkstyle/src/main/resources/falcon/findbugs-exclude.xml b/falcon-regression/checkstyle/src/main/resources/falcon/findbugs-exclude.xml
deleted file mode 100644
index 4de6b23..0000000
--- a/falcon-regression/checkstyle/src/main/resources/falcon/findbugs-exclude.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
- -->
-<FindBugsFilter>
-    <!-- These are generated by xjc compiler and hence excluded. -->
-    <Match>
-        <Or>
-            <Class name="~org.apache.falcon.entity.v0.feed.Validity"/>
-            <Class name="~org.apache.falcon.entity.v0.process.Validity"/>
-        </Or>
-    </Match>
-
-    <!--
-    Disable encoding as this might give an impression that Falcon code base is
-    "Internationalization" ready, but we haven't done anything consciously to guarantee that.
-    -->
-    <Match>
-        <Bug pattern="DM_DEFAULT_ENCODING"/>
-    </Match>
-</FindBugsFilter>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/interfaces/IEntityManagerHelper.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/interfaces/IEntityManagerHelper.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/interfaces/IEntityManagerHelper.java
index e4ab0ee..7eab15c 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/interfaces/IEntityManagerHelper.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/interfaces/IEntityManagerHelper.java
@@ -489,9 +489,8 @@ public abstract class IEntityManagerHelper {
             .createAndSendRequestProcessInstance(url, params, allColo, user);
     }
 
-    public InstancesSummaryResult getInstanceSummary(String entityName,
-                                                     String params
-    ) throws IOException, URISyntaxException, AuthenticationException, InterruptedException {
+    public InstancesSummaryResult getInstanceSummary(String entityName, String params)
+        throws IOException, URISyntaxException, AuthenticationException, InterruptedException {
         String url = createUrl(this.hostname + URLS.INSTANCE_SUMMARY.getValue(), getEntityType(),
             entityName, "");
         return (InstancesSummaryResult) InstanceUtil

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Edge.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Edge.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Edge.java
index a42416a..a230db0 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Edge.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Edge.java
@@ -24,7 +24,7 @@ import com.google.gson.annotations.SerializedName;
 public class Edge extends GraphEntity {
 
     /** Class for representing different labels of edge. */
-    public static enum LEBEL_TYPE {
+    public static enum LabelType {
         @SerializedName("stored-in")STORED_IN,
         @SerializedName("runs-on")RUNS_ON,
         @SerializedName("input")INPUT,
@@ -62,7 +62,7 @@ public class Edge extends GraphEntity {
     private int inV;
 
     @SerializedName("_label")
-    private LEBEL_TYPE label;
+    private LabelType label;
 
     public String getId() {
         return id;
@@ -76,17 +76,17 @@ public class Edge extends GraphEntity {
         return inV;
     }
 
-    public LEBEL_TYPE getLabel() {
+    public LabelType getLabel() {
         return label;
     }
 
     @Override
     public String toString() {
         return "Edge{"
-                + "_id='" + id + '\''
-                + ", _outV=" + outV
-                + ", _inV=" + inV
-                + ", _label=" + label
+                + "id='" + id + '\''
+                + ", outV=" + outV
+                + ", inV=" + inV
+                + ", label=" + label
                 + '}';
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/EdgesResult.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/EdgesResult.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/EdgesResult.java
index 357e340..7bce1e4 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/EdgesResult.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/EdgesResult.java
@@ -34,11 +34,11 @@ public class EdgesResult extends GraphResult {
         return String.format("EdgesResult{totalSize=%d, results=%s}", totalSize, results);
     }
 
-    public List<Edge> filterByType(Edge.LEBEL_TYPE edgeLabel) {
+    public List<Edge> filterByType(Edge.LabelType edgeLabel) {
         return filterEdgesByType(results, edgeLabel);
     }
 
-    public List<Edge> filterEdgesByType(List<Edge> edges, Edge.LEBEL_TYPE edgeLabel) {
+    public List<Edge> filterEdgesByType(List<Edge> edges, Edge.LabelType edgeLabel) {
         final List<Edge> result = new ArrayList<Edge>();
         for (Edge edge : edges) {
             if (edge.getLabel() == edgeLabel) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Vertex.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Vertex.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Vertex.java
index 5bbe72f..c947dac 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Vertex.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/response/lineage/Vertex.java
@@ -96,8 +96,8 @@ public class Vertex extends GraphEntity {
     @Override
     public String toString() {
         return "Vertex{"
-                + "_id=" + id
-                + ", _type=" + nodeType
+                + "id=" + id
+                + ", nodeType=" + nodeType
                 + ", name='" + name + '\''
                 + ", type=" + type
                 + ", timestamp='" + timestamp + '\''

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/supportClasses/HadoopFileEditor.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/supportClasses/HadoopFileEditor.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/supportClasses/HadoopFileEditor.java
index 32f50dc..5a8d0a0 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/supportClasses/HadoopFileEditor.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/supportClasses/HadoopFileEditor.java
@@ -46,10 +46,12 @@ public class HadoopFileEditor {
         files = new ArrayList<String>();
     }
 
-    /*
-    method to edit a file present on HDFS. Path is the location on HDFS,
-    2nd param is the first instance of string after u want ur tesxt to be
-    inserted, 3rd param is the text u want to insert
+    /**
+     * Method to edit a file present on HDFS. Path is the location on HDFS,
+     * @param path path of the file to be edited
+     * @param putAfterString first instance of string after which the text is to be
+     * @param toBeInserted the text to be inserted
+     * @throws IOException
      */
     public void edit(String path, String putAfterString, String toBeInserted) throws IOException {
         paths.add(path);
@@ -65,20 +67,20 @@ public class HadoopFileEditor {
         if (fs.exists(file)) {
             fs.copyToLocalFile(file, new Path(currentFile));
             FileUtils.copyFile(new File(currentFile), new File(currentFile + ".bck"));
-            BufferedWriter bufwriter = new BufferedWriter(new FileWriter("tmp"));
+            BufferedWriter bufWriter = new BufferedWriter(new FileWriter("tmp"));
             BufferedReader br = new BufferedReader(new FileReader(currentFile));
             String line;
             boolean isInserted = false;
             while ((line = br.readLine()) != null) {
-                bufwriter.write(line);
-                bufwriter.write('\n');
+                bufWriter.write(line);
+                bufWriter.write('\n');
                 if (line.contains(putAfterString) && !isInserted) {
-                    bufwriter.write(toBeInserted);
+                    bufWriter.write(toBeInserted);
                     isInserted = true;
                 }
             }
             br.close();
-            bufwriter.close();
+            bufWriter.close();
             FileUtils.deleteQuietly(new File(currentFile));
             FileUtils.copyFile(new File("tmp"), new File(currentFile));
             FileUtils.deleteQuietly(new File("tmp"));
@@ -94,8 +96,9 @@ public class HadoopFileEditor {
         }
     }
 
-    /*
-    puts back the original file to HDFS that was editied by edit function
+    /**
+     * Restore back the original file to HDFS that was edited by edit function.
+     * @throws IOException
      */
     public void restore() throws IOException {
         for (int i = 0; i < paths.size(); i++) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/GraphAssert.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/GraphAssert.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/GraphAssert.java
index b87dd4e..499cab9 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/GraphAssert.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/GraphAssert.java
@@ -155,7 +155,7 @@ public final class GraphAssert {
      * @param minOccurrence required number of edges
      */
     public static void assertEdgePresenceMinOccur(final EdgesResult edgesResult,
-                                                  final Edge.LEBEL_TYPE edgeLabel,
+                                                  final Edge.LabelType edgeLabel,
                                                   final int minOccurrence) {
         int occurrence = 0;
         for(Edge edge : edgesResult.getResults()) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/InstanceUtil.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/InstanceUtil.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/InstanceUtil.java
index ede5cee..fcef570 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/InstanceUtil.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/InstanceUtil.java
@@ -24,19 +24,10 @@ import com.google.gson.JsonDeserializer;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonSyntaxException;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.Frequency;
 import org.apache.falcon.entity.v0.feed.ACL;
-import org.apache.falcon.entity.v0.feed.CatalogTable;
-import org.apache.falcon.entity.v0.feed.Cluster;
-import org.apache.falcon.entity.v0.feed.ClusterType;
-import org.apache.falcon.entity.v0.feed.Location;
-import org.apache.falcon.entity.v0.feed.LocationType;
-import org.apache.falcon.entity.v0.feed.Locations;
-import org.apache.falcon.entity.v0.feed.Retention;
-import org.apache.falcon.entity.v0.feed.Validity;
 import org.apache.falcon.entity.v0.process.Input;
 import org.apache.falcon.regression.Entities.FeedMerlin;
 import org.apache.falcon.regression.Entities.ProcessMerlin;
@@ -90,8 +81,7 @@ public final class InstanceUtil {
     private static final EnumSet<Status> RUNNING_PREP_SUCCEEDED = EnumSet.of(Status.RUNNING,
         Status.PREP, Status.SUCCEEDED);
 
-    public static APIResult sendRequestProcessInstance(String
-            url, String user)
+    public static APIResult sendRequestProcessInstance(String url, String user)
         throws IOException, URISyntaxException, AuthenticationException, InterruptedException {
         return hitUrl(url, Util.getMethodType(url), user);
     }
@@ -594,96 +584,6 @@ public final class InstanceUtil {
     }
 
     /**
-     * Sets one more cluster to feed.
-     *
-     * @param feed          feed which is to be modified
-     * @param feedValidity  validity of the feed on the cluster
-     * @param feedRetention set retention of the feed on the cluster
-     * @param clusterName   cluster name, if null would erase all the cluster details from the feed
-     * @param clusterType   cluster type
-     * @param partition     - partition where data is available for feed
-     * @param locations     - location where data is picked
-     * @return - string representation of the modified feed
-     */
-    public static String setFeedCluster(String feed, Validity feedValidity, Retention feedRetention,
-            String clusterName,
-            ClusterType clusterType, String partition,
-            String... locations) {
-        Cluster feedCluster = createFeedCluster(feedValidity, feedRetention, clusterName,
-                clusterType, partition, null, locations);
-        return setFeedCluster(feed, clusterName, feedCluster);
-    }
-
-    public static String setFeedClusterWithTable(String feed, Validity feedValidity,
-                                                 Retention feedRetention, String clusterName,
-                                                 ClusterType clusterType, String partition,
-                                                 String tableUri) {
-        Cluster feedCluster = createFeedCluster(feedValidity, feedRetention, clusterName,
-                    clusterType, partition, tableUri, null);
-        return setFeedCluster(feed, clusterName, feedCluster);
-    }
-
-    private static String setFeedCluster(String feed, String clusterName, Cluster feedCluster) {
-        FeedMerlin f = new FeedMerlin(feed);
-        if (clusterName == null) {
-            f.getClusters().getClusters().clear();
-        } else {
-            f.getClusters().getClusters().add(feedCluster);
-        }
-        return f.toString();
-    }
-
-    private static CatalogTable getCatalogTable(String tableUri) {
-        CatalogTable catalogTable = new CatalogTable();
-        catalogTable.setUri(tableUri);
-        return catalogTable;
-    }
-
-    private static Cluster createFeedCluster(
-            Validity feedValidity, Retention feedRetention, String clusterName, ClusterType clusterType,
-            String partition, String tableUri, String[] locations) {
-        if (clusterName == null) {
-            return null;
-        }
-        Cluster cluster = new Cluster();
-        cluster.setName(clusterName);
-        cluster.setRetention(feedRetention);
-        if (clusterType != null) {
-            cluster.setType(clusterType);
-        }
-        cluster.setValidity(feedValidity);
-        if (partition != null) {
-            cluster.setPartition(partition);
-        }
-
-        // if table uri is not empty or null then set it.
-        if (StringUtils.isNotEmpty(tableUri)) {
-            cluster.setTable(getCatalogTable(tableUri));
-        }
-        Locations feedLocations = new Locations();
-        if (ArrayUtils.isNotEmpty(locations)) {
-            for (int i = 0; i < locations.length; i++) {
-                Location oneLocation = new Location();
-                oneLocation.setPath(locations[i]);
-                if (i == 0) {
-                    oneLocation.setType(LocationType.DATA);
-                } else if (i == 1) {
-                    oneLocation.setType(LocationType.STATS);
-                } else if (i == 2) {
-                    oneLocation.setType(LocationType.META);
-                } else if (i == 3) {
-                    oneLocation.setType(LocationType.TMP);
-                } else {
-                    Assert.fail("unexpected value of locations: " + Arrays.toString(locations));
-                }
-                feedLocations.getLocations().add(oneLocation);
-            }
-            cluster.setLocations(feedLocations);
-        }
-        return cluster;
-    }
-
-    /**
      * Retrieves replication coordinatorID from bundle of coordinators.
      */
     public static List<String> getReplicationCoordID(String bundlID,
@@ -731,30 +631,6 @@ public final class InstanceUtil {
     }
 
     /**
-     * Sets one more cluster to process definition.
-     *
-     * @param process     - process definition string representation
-     * @param clusterName - name of cluster
-     * @param validity    - cluster validity
-     * @return - string representation of modified process
-     */
-    public static String setProcessCluster(String process, String clusterName,
-            org.apache.falcon.entity.v0.process.Validity validity) {
-        org.apache.falcon.entity.v0.process.Cluster c =
-                new org.apache.falcon.entity.v0.process.Cluster();
-        c.setName(clusterName);
-        c.setValidity(validity);
-        ProcessMerlin p = new ProcessMerlin(process);
-
-        if (clusterName == null) {
-            p.getClusters().getClusters().set(0, null);
-        } else {
-            p.getClusters().getClusters().add(c);
-        }
-        return p.toString();
-    }
-
-    /**
      * Adds one input into process.
      *
      * @param process - where input should be inserted

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/OozieUtil.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/OozieUtil.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/OozieUtil.java
index b061700..95919ee 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/OozieUtil.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/OozieUtil.java
@@ -256,9 +256,11 @@ public final class OozieUtil {
 
             for (CoordinatorJob coord : bundleJob.getCoordinators()) {
                 LOGGER.info("Appname is : " + coord.getAppName());
-                if ((coord.getAppName().contains("DEFAULT") && coord.getAppName().contains("PROCESS"))
-                        || (coord.getAppName().contains("REPLICATION") && coord.getAppName()
-                            .contains("FEED"))) {
+                if ((coord.getAppName().contains("DEFAULT")
+                        && coord.getAppName().contains("PROCESS"))
+                    ||
+                    (coord.getAppName().contains("REPLICATION")
+                        && coord.getAppName().contains("FEED"))) {
                     jobInfo = oozieClient.getCoordJobInfo(coord.getId());
                 } else {
                     LOGGER.info("Desired coord does not exists on " + oozieClient.getOozieUrl());

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/XmlUtil.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/XmlUtil.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/XmlUtil.java
index f0ea328..1041910 100644
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/XmlUtil.java
+++ b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/core/util/XmlUtil.java
@@ -18,10 +18,6 @@
 
 package org.apache.falcon.regression.core.util;
 
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.entity.v0.feed.ActionType;
-import org.apache.falcon.entity.v0.feed.Retention;
-import org.apache.falcon.entity.v0.feed.Validity;
 import org.custommonkey.xmlunit.Diff;
 import org.custommonkey.xmlunit.XMLUnit;
 import org.apache.log4j.Logger;
@@ -39,32 +35,6 @@ public final class XmlUtil {
     }
     private static final Logger LOGGER = Logger.getLogger(XmlUtil.class);
 
-    public static Validity createValidity(String start, String end) {
-        Validity v = new Validity();
-        v.setStart(TimeUtil.oozieDateToDate(start).toDate());
-        v.setEnd(TimeUtil.oozieDateToDate(end).toDate());
-        return v;
-    }
-
-    public static Retention createRetention(String limit, ActionType action) {
-        Retention r = new Retention();
-        r.setLimit(new Frequency(limit));
-        r.setAction(action);
-        return r;
-    }
-
-    public static org.apache.falcon.entity.v0.process.Validity
-    createProcessValidity(
-        String startTime, String endTime) {
-        org.apache.falcon.entity.v0.process.Validity v =
-            new org.apache.falcon.entity.v0.process.Validity();
-        LOGGER.info("instanceUtil.oozieDateToDate(endTime).toDate(): "
-            + TimeUtil.oozieDateToDate(endTime).toDate());
-        v.setEnd(TimeUtil.oozieDateToDate(endTime).toDate());
-        v.setStart(TimeUtil.oozieDateToDate(startTime).toDate());
-        return v;
-    }
-
     public static boolean isIdentical(String expected, String actual)
         throws IOException, SAXException {
         XMLUnit.setIgnoreWhitespace(true);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ExternalFSTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ExternalFSTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ExternalFSTest.java
index 4359b16..0cd3284 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ExternalFSTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ExternalFSTest.java
@@ -23,6 +23,7 @@ import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
 import org.apache.falcon.entity.v0.feed.ActionType;
 import org.apache.falcon.entity.v0.feed.ClusterType;
+import org.apache.falcon.regression.Entities.FeedMerlin;
 import org.apache.falcon.regression.core.bundle.Bundle;
 import org.apache.falcon.regression.core.enumsAndConstants.MerlinConstants;
 import org.apache.falcon.regression.core.helpers.ColoHelper;
@@ -34,7 +35,6 @@ import org.apache.falcon.regression.core.util.MatrixUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
@@ -148,22 +148,22 @@ public class ExternalFSTest extends BaseTestClass{
         String targetDataLocation = endpoint + testWasbTargetDir + datePattern;
         feed = InstanceUtil.setFeedFilePath(feed, sourcePath + '/' + datePattern);
         //erase all clusters from feed definition
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
         //set local cluster as source
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)),
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
         //set externalFS cluster as target
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[1].getClusters().get(0)),
-            ClusterType.TARGET, null, targetDataLocation);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(targetDataLocation)
+                .build()).toString();
 
         //submit and schedule feed
         LOGGER.info("Feed : " + Util.prettyPrintXml(feed));

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedClusterUpdateTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedClusterUpdateTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedClusterUpdateTest.java
index c2ae1f9..97048b8 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedClusterUpdateTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedClusterUpdateTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.falcon.regression;
 
+import org.apache.falcon.regression.Entities.FeedMerlin;
 import org.apache.falcon.regression.core.bundle.Bundle;
 import org.apache.falcon.entity.v0.feed.ActionType;
 import org.apache.falcon.entity.v0.feed.ClusterType;
@@ -97,10 +98,7 @@ public class FeedClusterUpdateTest extends BaseTestClass {
         }
         BundleUtil.submitAllClusters(prism, bundles[0], bundles[1], bundles[2]);
         feed = bundles[0].getDataSets().get(0);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2009-02-01T00:00Z", "2012-01-01T00:00Z"),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
         startTime = TimeUtil.getTimeWrtSystemTime(-50);
     }
 
@@ -112,16 +110,23 @@ public class FeedClusterUpdateTest extends BaseTestClass {
     @Test(enabled = false, groups = {"multiCluster"})
     public void addSourceCluster() throws Exception {
         //add one source and one target , schedule only on source
-        feedOriginalSubmit = InstanceUtil.setFeedCluster(feed, XmlUtil.createValidity(startTime,
-                TimeUtil.addMinsToTime(startTime, 65)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-            null);
-        feedOriginalSubmit = InstanceUtil.setFeedCluster(feedOriginalSubmit,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 20),
-                TimeUtil.addMinsToTime(startTime, 85)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET, null);
+        feedOriginalSubmit = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime,
+                    TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .build())
+            .toString();
+        feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 20),
+                    TimeUtil.addMinsToTime(startTime, 85))
+                .withClusterType(ClusterType.TARGET)
+                .build())
+            .toString();
 
         LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
@@ -158,23 +163,32 @@ public class FeedClusterUpdateTest extends BaseTestClass {
                 "RETENTION"), 0);
 
         //prepare updated Feed
-        feedUpdated = InstanceUtil.setFeedCluster(
-            feed, XmlUtil.createValidity(startTime,
-                TimeUtil.addMinsToTime(startTime, 65)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-            "US/${cluster.colo}");
-        feedUpdated = InstanceUtil.setFeedCluster(feedUpdated,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 20),
-                TimeUtil.addMinsToTime(startTime, 85)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET, null);
-        feedUpdated = InstanceUtil.setFeedCluster(feedUpdated,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 40),
-                TimeUtil.addMinsToTime(startTime, 110)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-            "UK/${cluster.colo}");
+        feedUpdated = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime,
+                    TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .build())
+            .toString();
+        feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 20),
+                    TimeUtil.addMinsToTime(startTime, 85))
+                .withClusterType(ClusterType.TARGET)
+                .build())
+            .toString();
+        feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 40),
+                    TimeUtil.addMinsToTime(startTime, 110))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("UK/${cluster.colo}")
+                .build())
+            .toString();
 
         response = prism.getFeedHelper().update(feedUpdated, feedUpdated);
         TimeUtil.sleepSeconds(20);
@@ -204,18 +218,24 @@ public class FeedClusterUpdateTest extends BaseTestClass {
     @Test(enabled = false, groups = {"multiCluster"})
     public void addTargetCluster() throws Exception {
         //add one source and one target , schedule only on source
-        feedOriginalSubmit = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTime,
-                    TimeUtil.addMinsToTime(startTime, 65)),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-                null);
-        feedOriginalSubmit = InstanceUtil.setFeedCluster(feedOriginalSubmit,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 40),
-                TimeUtil.addMinsToTime(startTime, 110)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-            "UK/${cluster.colo}");
+        feedOriginalSubmit = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime,
+                    TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .build())
+            .toString();
+        feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 40),
+                    TimeUtil.addMinsToTime(startTime, 110))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("UK/${cluster.colo}")
+                .build())
+            .toString();
 
         LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
@@ -254,23 +274,33 @@ public class FeedClusterUpdateTest extends BaseTestClass {
             0);
 
         //prepare updated Feed
-        feedUpdated = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTime,
-                    TimeUtil.addMinsToTime(startTime, 65)),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-                "US/${cluster.colo}");
-        feedUpdated = InstanceUtil.setFeedCluster(feedUpdated,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 20),
-                TimeUtil.addMinsToTime(startTime, 85)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET, null);
-        feedUpdated = InstanceUtil.setFeedCluster(feedUpdated,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 40),
-                TimeUtil.addMinsToTime(startTime, 110)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-            "UK/${cluster.colo}");
+        feedUpdated = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime,
+                    TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("US/${cluster.colo}")
+                .build())
+            .toString();
+        feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 20),
+                    TimeUtil.addMinsToTime(startTime, 85))
+                .withClusterType(ClusterType.TARGET)
+                .build())
+            .toString();
+        feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 40),
+                    TimeUtil.addMinsToTime(startTime, 110))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("UK/${cluster.colo}")
+                .build())
+            .toString();
 
         LOGGER.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated));
 
@@ -303,12 +333,15 @@ public class FeedClusterUpdateTest extends BaseTestClass {
     @Test(enabled = false, groups = {"multiCluster"})
     public void add2SourceCluster() throws Exception {
         //add one source and one target , schedule only on source
-        feedOriginalSubmit = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTime,
-                    TimeUtil.addMinsToTime(startTime, 65)),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-                null);
+        feedOriginalSubmit = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime,
+                    TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .build())
+            .toString();
 
         LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
@@ -347,23 +380,33 @@ public class FeedClusterUpdateTest extends BaseTestClass {
             0);
 
         //prepare updated Feed
-        feedUpdated = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTime,
-                    TimeUtil.addMinsToTime(startTime, 65)),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-                "US/${cluster.colo}");
-        feedUpdated = InstanceUtil.setFeedCluster(feedUpdated,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 20),
-                TimeUtil.addMinsToTime(startTime, 85)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE, null);
-        feedUpdated = InstanceUtil.setFeedCluster(feedUpdated,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 40),
-                TimeUtil.addMinsToTime(startTime, 110)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-            "UK/${cluster.colo}");
+        feedUpdated = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime,
+                    TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("US/${cluster.colo}")
+                .build())
+            .toString();
+        feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 20),
+                    TimeUtil.addMinsToTime(startTime, 85))
+                .withClusterType(ClusterType.SOURCE)
+                .build())
+            .toString();
+        feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 40),
+                    TimeUtil.addMinsToTime(startTime, 110))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("UK/${cluster.colo}")
+                .build())
+            .toString();
 
         LOGGER.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated));
 
@@ -396,12 +439,14 @@ public class FeedClusterUpdateTest extends BaseTestClass {
     @Test(enabled = false, groups = {"multiCluster"})
     public void add2TargetCluster() throws Exception {
         //add one source and one target , schedule only on source
-        feedOriginalSubmit = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTime,
-                    TimeUtil.addMinsToTime(startTime, 65)),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-                null);
+        feedOriginalSubmit = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .build())
+            .toString();
 
         LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
@@ -441,21 +486,30 @@ public class FeedClusterUpdateTest extends BaseTestClass {
             0);
 
         //prepare updated Feed
-        feedUpdated = InstanceUtil.setFeedCluster(feed, XmlUtil.createValidity(startTime,
-                TimeUtil.addMinsToTime(startTime, 65)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-            null);
-        feedUpdated = InstanceUtil.setFeedCluster(feedUpdated,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 20),
-                TimeUtil.addMinsToTime(startTime, 85)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET, null);
-        feedUpdated = InstanceUtil.setFeedCluster(feedUpdated,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 40),
-                TimeUtil.addMinsToTime(startTime, 110)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.TARGET, null);
+        feedUpdated = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .build())
+            .toString();
+        feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 20),
+                    TimeUtil.addMinsToTime(startTime, 85))
+                .withClusterType(ClusterType.TARGET)
+                .build())
+            .toString();
+        feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 40),
+                    TimeUtil.addMinsToTime(startTime, 110))
+                .withClusterType(ClusterType.TARGET)
+                .build())
+            .toString();
 
         LOGGER.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated));
 
@@ -488,12 +542,15 @@ public class FeedClusterUpdateTest extends BaseTestClass {
     @Test(enabled = false, groups = {"multiCluster"})
     public void add1Source1TargetCluster() throws Exception {
         //add one source and one target , schedule only on source
-        feedOriginalSubmit = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTime,
-                    TimeUtil.addMinsToTime(startTime, 65)),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-                null);
+        feedOriginalSubmit = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime,
+                    TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .build())
+            .toString();
 
         LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
@@ -532,23 +589,33 @@ public class FeedClusterUpdateTest extends BaseTestClass {
                 "RETENTION"), 0);
 
         //prepare updated Feed
-        feedUpdated = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTime,
-                    TimeUtil.addMinsToTime(startTime, 65)),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-                "US/${cluster.colo}");
-        feedUpdated = InstanceUtil.setFeedCluster(feedUpdated,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 20),
-                TimeUtil.addMinsToTime(startTime, 85)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET, null);
-        feedUpdated = InstanceUtil.setFeedCluster(feedUpdated,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 40),
-                TimeUtil.addMinsToTime(startTime, 110)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-            "UK/${cluster.colo}");
+        feedUpdated = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime,
+                    TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("US/${cluster.colo}")
+                .build())
+            .toString();
+        feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 20),
+                    TimeUtil.addMinsToTime(startTime, 85))
+                .withClusterType(ClusterType.TARGET)
+                .build())
+            .toString();
+        feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 40),
+                    TimeUtil.addMinsToTime(startTime, 110))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("UK/${cluster.colo}")
+                .build())
+            .toString();
 
         LOGGER.info("Updated Feed: " + Util.prettyPrintXml(feedUpdated));
 
@@ -581,23 +648,32 @@ public class FeedClusterUpdateTest extends BaseTestClass {
     @Test(enabled = false, groups = {"multiCluster"})
     public void deleteSourceCluster() throws Exception {
         //add one source and one target , schedule only on source
-        feedOriginalSubmit = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTime,
-                    TimeUtil.addMinsToTime(startTime, 65)),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-                "US/${cluster.colo}");
-        feedOriginalSubmit = InstanceUtil.setFeedCluster(feedOriginalSubmit,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 20),
-                TimeUtil.addMinsToTime(startTime, 85)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET, null);
-        feedOriginalSubmit = InstanceUtil.setFeedCluster(feedOriginalSubmit,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 40),
-                TimeUtil.addMinsToTime(startTime, 110)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-            "UK/${cluster.colo}");
+        feedOriginalSubmit = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("US/${cluster.colo}")
+                .build())
+            .toString();
+        feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 20),
+                    TimeUtil.addMinsToTime(startTime, 85))
+                .withClusterType(ClusterType.TARGET)
+                .build())
+            .toString();
+        feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 40),
+                    TimeUtil.addMinsToTime(startTime, 110))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("UK/${cluster.colo}")
+                .build())
+            .toString();
 
         LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
@@ -636,17 +712,23 @@ public class FeedClusterUpdateTest extends BaseTestClass {
                 "RETENTION"), 1);
 
         //prepare updated Feed
-        feedUpdated = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTime,
-                    TimeUtil.addMinsToTime(startTime, 65)),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-                null);
-        feedUpdated = InstanceUtil.setFeedCluster(feedUpdated,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 20),
-                TimeUtil.addMinsToTime(startTime, 85)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET, null);
+        feedUpdated = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime,
+                    TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .build())
+            .toString();
+        feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 20),
+                    TimeUtil.addMinsToTime(startTime, 85))
+                .withClusterType(ClusterType.TARGET)
+                .build())
+            .toString();
 
         response = prism.getFeedHelper().update(feedUpdated, feedUpdated);
         TimeUtil.sleepSeconds(20);
@@ -701,31 +783,34 @@ public class FeedClusterUpdateTest extends BaseTestClass {
 
         //add two source and one target
 
-        feedOriginalSubmit = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feedOriginalSubmit = InstanceUtil
-            .setFeedCluster(feedOriginalSubmit, XmlUtil.createValidity(startTime,
-                    TimeUtil.addMinsToTime(startTime, 65)),
-                XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)),
-                ClusterType.SOURCE,
-                "US/${cluster.colo}");
-        feedOriginalSubmit = InstanceUtil.setFeedCluster(feedOriginalSubmit,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 20),
-                TimeUtil.addMinsToTime(startTime, 85)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)),
-            ClusterType.TARGET, null);
-        feedOriginalSubmit = InstanceUtil.setFeedCluster(feedOriginalSubmit,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 40),
-                TimeUtil.addMinsToTime(startTime, 110)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)),
-            ClusterType.SOURCE,
-            "UK/${cluster.colo}");
+        feedOriginalSubmit = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("US/${cluster.colo}")
+                .build())
+            .toString();
+        feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 20),
+                    TimeUtil.addMinsToTime(startTime, 85))
+                .withClusterType(ClusterType.TARGET)
+                .build())
+            .toString();
+        feedOriginalSubmit = FeedMerlin.fromString(feedOriginalSubmit).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 40),
+                    TimeUtil.addMinsToTime(startTime, 110))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("UK/${cluster.colo}")
+                .build())
+            .toString();
 
         LOGGER.info("Feed: " + Util.prettyPrintXml(feedOriginalSubmit));
 
@@ -765,18 +850,17 @@ public class FeedClusterUpdateTest extends BaseTestClass {
 
         //prepare updated Feed
 
-        feedUpdated = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feedUpdated = InstanceUtil.setFeedCluster(feedUpdated,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 40),
-                TimeUtil.addMinsToTime(startTime, 110)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)),
-            ClusterType.SOURCE,
-            "UK/${cluster.colo}");
+        feedUpdated = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feedUpdated = FeedMerlin.fromString(feedUpdated).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(TimeUtil.addMinsToTime(startTime, 40),
+                    TimeUtil.addMinsToTime(startTime, 110))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("UK/${cluster.colo}")
+                .build())
+            .toString();
 
         LOGGER.info("Feed: " + Util.prettyPrintXml(feedUpdated));
 


[2/3] incubator-falcon git commit: FALCON-928 Use falcon's checkstyle for falcon regression code. Contributed by Raghav Kumar Gautam

Posted by ra...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java
index 650d4c3..dfd5d0b 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.falcon.regression;
 
+import org.apache.falcon.regression.Entities.FeedMerlin;
 import org.apache.falcon.regression.core.bundle.Bundle;
 import org.apache.falcon.entity.v0.feed.ActionType;
 import org.apache.falcon.entity.v0.feed.ClusterType;
@@ -25,11 +26,9 @@ import org.apache.falcon.regression.core.helpers.ColoHelper;
 import org.apache.falcon.regression.core.util.AssertUtil;
 import org.apache.falcon.regression.core.util.BundleUtil;
 import org.apache.falcon.regression.core.util.HadoopUtil;
-import org.apache.falcon.regression.core.util.InstanceUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.falcon.resource.InstancesResult;
 import org.apache.hadoop.fs.FileSystem;
@@ -82,7 +81,7 @@ public class FeedInstanceStatusTest extends BaseTestClass {
     }
 
     /**
-     * Goes through the whole feed replication workflow checking its instances status while
+     * Goes through the whole feed replication workflow checking its instances status while.
      * submitting feed, scheduling it, performing different combinations of actions like
      * -submit, -resume, -kill, -rerun.
      */
@@ -90,72 +89,77 @@ public class FeedInstanceStatusTest extends BaseTestClass {
     public void feedInstanceStatusRunning() throws Exception {
         bundles[0].setInputFeedDataPath(feedInputPath);
 
-        LOGGER.info("cluster bundle1: " + Util.prettyPrintXml(bundles[0].getClusters().get(0)));
         AssertUtil.assertSucceeded(prism.getClusterHelper()
             .submitEntity(bundles[0].getClusters().get(0)));
 
-        LOGGER.info("cluster bundle2: " + Util.prettyPrintXml(bundles[1].getClusters().get(0)));
         AssertUtil.assertSucceeded(prism.getClusterHelper()
             .submitEntity(bundles[1].getClusters().get(0)));
 
-        LOGGER.info("cluster bundle3: " + Util.prettyPrintXml(bundles[2].getClusters().get(0)));
         AssertUtil.assertSucceeded(prism.getClusterHelper()
             .submitEntity(bundles[2].getClusters().get(0)));
 
         String feed = bundles[0].getDataSets().get(0);
         String feedName = Util.readEntityName(feed);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2009-02-01T00:00Z", "2012-01-01T00:00Z"),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
         String startTime = TimeUtil.getTimeWrtSystemTime(-50);
-
-        feed = InstanceUtil.setFeedCluster(feed, XmlUtil.createValidity(startTime,
-            TimeUtil.addMinsToTime(startTime, 65)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-            "US/${cluster.colo}");
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 20),
-                TimeUtil.addMinsToTime(startTime, 85)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET, null);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 40),
-                TimeUtil.addMinsToTime(startTime, 110)),
-            XmlUtil.createRetention("hours(10)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
-            "UK/${cluster.colo}");
+        final String startPlus20Min = TimeUtil.addMinsToTime(startTime, 20);
+        final String startPlus40Min = TimeUtil.addMinsToTime(startTime, 40);
+        final String startPlus100Min = TimeUtil.addMinsToTime(startTime, 100);
+
+        feed = FeedMerlin.fromString(feed)
+            .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+                Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("US/${cluster.colo}")
+                .build())
+            .toString();
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startPlus20Min,
+                    TimeUtil.addMinsToTime(startTime, 85))
+                .withClusterType(ClusterType.TARGET)
+                .build())
+            .toString();
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("hours(10)", ActionType.DELETE)
+                .withValidity(startPlus40Min,
+                    TimeUtil.addMinsToTime(startTime, 110))
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("UK/${cluster.colo}")
+                .build())
+            .toString();
 
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 
         //status before submit
-        prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 100)
+        prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus100Min
                 + "&end=" + TimeUtil.addMinsToTime(startTime, 120));
 
         AssertUtil.assertSucceeded(prism.getFeedHelper().submitEntity(feed));
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(startTime, 100));
+            "?start=" + startTime + "&end=" + startPlus100Min);
 
         AssertUtil.assertSucceeded(prism.getFeedHelper().schedule(feed));
 
         // both replication instances
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(startTime, 100));
+            "?start=" + startTime + "&end=" + startPlus100Min);
 
         // single instance at -30
-        prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 20));
+        prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus20Min);
+
         //single at -10
-        prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+        prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus40Min);
+
         //single at 10
-        prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+        prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus40Min);
+
         //single at 30
-        prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+        prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus40Min);
 
         String postFix = "/US/" + cluster2.getClusterHelper().getColoName();
         String prefix = bundles[0].getFeedDataPathPrefix();
@@ -169,51 +173,46 @@ public class FeedInstanceStatusTest extends BaseTestClass {
 
         // both replication instances
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(startTime, 100));
+            "?start=" + startTime + "&end=" + startPlus100Min);
 
         // single instance at -30
-        prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 20));
+        prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus20Min);
+
         //single at -10
-        prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+        prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus40Min);
+
         //single at 10
-        prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+        prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus40Min);
+
         //single at 30
-        prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+        prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus40Min);
 
         LOGGER.info("Wait till feed goes into running ");
 
         //suspend instances -10
-        prism.getFeedHelper().getProcessInstanceSuspend(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+        prism.getFeedHelper().getProcessInstanceSuspend(feedName, "?start=" + startPlus40Min);
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 20)
-                + "&end=" + TimeUtil.addMinsToTime(startTime, 40));
+            "?start=" + startPlus20Min + "&end=" + startPlus40Min);
 
         //resuspend -10 and suspend -30 source specific
         prism.getFeedHelper().getProcessInstanceSuspend(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 20)
-                + "&end=" + TimeUtil.addMinsToTime(startTime, 40));
+            "?start=" + startPlus20Min + "&end=" + startPlus40Min);
         prism.getFeedHelper().getProcessInstanceStatus(feedName,
-            "?start=" + TimeUtil.addMinsToTime(startTime, 20)
-                + "&end=" + TimeUtil.addMinsToTime(startTime, 40));
+            "?start=" + startPlus20Min + "&end=" + startPlus40Min);
 
         //resume -10 and -30
-        prism.getFeedHelper().getProcessInstanceResume(feedName, "?start=" + TimeUtil
-            .addMinsToTime(startTime, 20) + "&end=" + TimeUtil.addMinsToTime(startTime, 40));
-        prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + TimeUtil
-            .addMinsToTime(startTime, 20) + "&end=" + TimeUtil.addMinsToTime(startTime, 40));
+        prism.getFeedHelper().getProcessInstanceResume(feedName,
+            "?start=" + startPlus20Min + "&end=" + startPlus40Min);
+        prism.getFeedHelper().getProcessInstanceStatus(feedName,
+            "?start=" + startPlus20Min + "&end=" + startPlus40Min);
 
         //get running instances
         prism.getFeedHelper().getRunningInstance(feedName);
 
         //rerun succeeded instance
         prism.getFeedHelper().getProcessInstanceRerun(feedName, "?start=" + startTime);
-        prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startTime
-            + "&end=" + TimeUtil.addMinsToTime(startTime, 20));
+        prism.getFeedHelper().getProcessInstanceStatus(feedName,
+            "?start=" + startTime + "&end=" + startPlus20Min);
 
         //kill instance
         prism.getFeedHelper().getProcessInstanceKill(feedName,
@@ -226,8 +225,8 @@ public class FeedInstanceStatusTest extends BaseTestClass {
 
         //rerun killed instance
         prism.getFeedHelper().getProcessInstanceRerun(feedName, "?start=" + startTime);
-        prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startTime
-            + "&end=" + TimeUtil.addMinsToTime(startTime, 110));
+        prism.getFeedHelper().getProcessInstanceStatus(feedName,
+            "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(startTime, 110));
 
         //kill feed
         prism.getFeedHelper().delete(feed);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedReplicationTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedReplicationTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedReplicationTest.java
index b7afad4..6c61a4a 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedReplicationTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedReplicationTest.java
@@ -31,7 +31,6 @@ import org.apache.falcon.regression.core.util.InstanceUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.falcon.resource.InstancesResult;
 import org.apache.hadoop.fs.FileSystem;
@@ -115,22 +114,22 @@ public class FeedReplicationTest extends BaseTestClass {
         String feed = bundles[0].getDataSets().get(0);
         feed = InstanceUtil.setFeedFilePath(feed, feedDataLocation);
         //erase all clusters from feed definition
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
         //set cluster1 as source
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)),
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
         //set cluster2 as target
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[1].getClusters().get(0)),
-            ClusterType.TARGET, null, targetDataLocation);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(targetDataLocation)
+                .build()).toString();
 
         //submit and schedule feed
         LOGGER.info("Feed : " + Util.prettyPrintXml(feed));
@@ -186,28 +185,30 @@ public class FeedReplicationTest extends BaseTestClass {
         String feed = bundles[0].getDataSets().get(0);
         feed = InstanceUtil.setFeedFilePath(feed, feedDataLocation);
         //erase all clusters from feed definition
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
         //set cluster1 as source
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)),
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
         //set cluster2 as target
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[1].getClusters().get(0)),
-            ClusterType.TARGET, null, targetDataLocation);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(targetDataLocation)
+                .build()).toString();
         //set cluster3 as target
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)),
-            ClusterType.TARGET, null, targetDataLocation);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(targetDataLocation)
+                .build()).toString();
 
         //submit and schedule feed
         LOGGER.info("Feed : " + Util.prettyPrintXml(feed));
@@ -282,22 +283,22 @@ public class FeedReplicationTest extends BaseTestClass {
         String feed = bundles[0].getDataSets().get(0);
         feed = InstanceUtil.setFeedFilePath(feed, feedDataLocation);
         //erase all clusters from feed definition
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
         //set cluster1 as source
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[0].getClusters().get(0)),
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
         //set cluster2 as target
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(bundles[1].getClusters().get(0)),
-            ClusterType.TARGET, null, targetDataLocation);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(targetDataLocation)
+                .build()).toString();
 
         //submit and schedule feed
         LOGGER.info("Feed : " + Util.prettyPrintXml(feed));

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java
index 8395476..da35e4c 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java
@@ -18,6 +18,7 @@
 
 package org.apache.falcon.regression;
 
+import org.apache.falcon.regression.Entities.FeedMerlin;
 import org.apache.falcon.regression.core.bundle.Bundle;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.feed.ActionType;
@@ -29,7 +30,6 @@ import org.apache.falcon.regression.core.util.InstanceUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.falcon.resource.InstancesSummaryResult;
 import org.apache.hadoop.fs.FileSystem;
@@ -222,27 +222,29 @@ public class InstanceSummaryTest extends BaseTestClass {
         String feed = bundles[0].getDataSets().get(0);
 
         //cluster_1 is target, cluster_2 is source and cluster_3 is neutral
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(100000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTime, "2099-10-01T12:10Z"),
-                XmlUtil.createRetention("days(100000)", ActionType.DELETE),
-                Util.readEntityName(bundles[2].getClusters().get(0)), null, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTime, "2099-10-01T12:25Z"),
-                XmlUtil.createRetention("days(100000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET,
-                null, feedInputPath);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
-                XmlUtil.createRetention("days(100000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
-                null, feedInputPath);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("days(100000)", ActionType.DELETE)
+                .withValidity(startTime, "2099-10-01T12:10Z")
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(100000)", ActionType.DELETE)
+                .withValidity(startTime, "2099-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(feedInputPath)
+                .build()).toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(100000)", ActionType.DELETE)
+                .withValidity(startTime, "2099-01-01T00:00Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withDataLocation(feedInputPath)
+                .build()).toString();
 
         //submit clusters
         Bundle.submitCluster(bundles[0], bundles[1], bundles[2]);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java
index b1a2393..645d63b 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java
@@ -40,7 +40,8 @@ import java.io.IOException;
 import java.util.Arrays;
 
 /**
- * A listener for test running.
+ * Testng listener class. This is useful for things that are applicable to all the tests as well
+ * as taking actions that depend on test results.
  */
 public class TestngListener implements ITestListener, IExecutionListener {
     private static final Logger LOGGER = Logger.getLogger(TestngListener.class);
@@ -72,8 +73,8 @@ public class TestngListener implements ITestListener, IExecutionListener {
     public void onTestFailure(ITestResult result) {
         logEndOfTest(result, "FAILED");
         if (BaseUITestClass.getDriver() != null) {
-            byte[] scrFile = ((TakesScreenshot)BaseUITestClass.getDriver())
-                .getScreenshotAs(OutputType.BYTES);
+            byte[] scrFile =
+                ((TakesScreenshot)BaseUITestClass.getDriver()).getScreenshotAs(OutputType.BYTES);
             try {
                 String filename = OSUtil.getPath("target", "surefire-reports", "screenshots", String.format("%s.%s.png",
                         result.getTestClass().getRealClass().getSimpleName(), result.getName()));

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java
index 2a4a9c1..b24abe3 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java
@@ -34,7 +34,6 @@ import org.apache.falcon.regression.core.util.HCatUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.Util;
 import org.apache.falcon.regression.core.util.InstanceUtil;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.hive.hcatalog.api.HCatClient;
 import org.apache.hive.hcatalog.api.HCatCreateTableDesc;
@@ -177,11 +176,13 @@ public class HCatFeedOperationsTest extends BaseTestClass {
 
         feed = bundles[0].getDataSets().get(0);
         // set cluster 2 as the target.
-        feed = InstanceUtil.setFeedClusterWithTable(feed,
-                XmlUtil.createValidity(startDate, endDate),
-                XmlUtil.createRetention("months(9000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
-                tableUri);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("months(9000)", ActionType.DELETE)
+                .withValidity(startDate, endDate)
+                .withClusterType(ClusterType.TARGET)
+                .withTableUri(tableUri)
+                .build()).toString();
 
         AssertUtil.assertPartial(prism.getFeedHelper().submitAndSchedule(feed));
     }
@@ -206,11 +207,13 @@ public class HCatFeedOperationsTest extends BaseTestClass {
 
         feed = bundles[0].getDataSets().get(0);
         // set cluster 2 as the target.
-        feed = InstanceUtil.setFeedClusterWithTable(feed,
-                XmlUtil.createValidity(startDate, endDate),
-                XmlUtil.createRetention("months(9000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
-                tableUri);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("months(9000)", ActionType.DELETE)
+                .withValidity(startDate, endDate)
+                .withClusterType(ClusterType.TARGET)
+                .withTableUri(tableUri)
+                .build()).toString();
 
         AssertUtil.assertSucceeded(prism.getFeedHelper().submitAndSchedule(feed));
         Assert.assertEquals(InstanceUtil

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java
index af1a751..76c9078 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java
@@ -19,6 +19,7 @@
 package org.apache.falcon.regression.hcat;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.falcon.regression.Entities.FeedMerlin;
 import org.apache.falcon.regression.core.bundle.Bundle;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
 import org.apache.falcon.entity.v0.EntityType;
@@ -34,7 +35,6 @@ import org.apache.falcon.regression.core.util.InstanceUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -179,11 +179,13 @@ public class HCatReplicationTest extends BaseTestClass {
 
         String feed = bundles[0].getDataSets().get(0);
         // set the cluster 2 as the target.
-        feed = InstanceUtil.setFeedClusterWithTable(feed,
-            XmlUtil.createValidity(startDate, endDate),
-            XmlUtil.createRetention("months(9000)", ActionType.DELETE),
-            Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
-            tableUri);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("months(9000)", ActionType.DELETE)
+                .withValidity(startDate, endDate)
+                .withClusterType(ClusterType.TARGET)
+                .withTableUri(tableUri)
+                .build()).toString();
 
         AssertUtil.assertSucceeded(prism.getFeedHelper().submitAndSchedule(feed));
         TimeUtil.sleepSeconds(TIMEOUT);
@@ -265,17 +267,21 @@ public class HCatReplicationTest extends BaseTestClass {
 
         String feed = bundles[0].getDataSets().get(0);
         // set the cluster 2 as the target.
-        feed = InstanceUtil.setFeedClusterWithTable(feed,
-            XmlUtil.createValidity(startDate, endDate),
-            XmlUtil.createRetention("months(9000)", ActionType.DELETE),
-            Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
-            tableUri);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("months(9000)", ActionType.DELETE)
+                .withValidity(startDate, endDate)
+                .withClusterType(ClusterType.TARGET)
+                .withTableUri(tableUri)
+                .build()).toString();
         // set the cluster 3 as the target.
-        feed = InstanceUtil.setFeedClusterWithTable(feed,
-            XmlUtil.createValidity(startDate, endDate),
-            XmlUtil.createRetention("months(9000)", ActionType.DELETE),
-            Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.TARGET, null,
-            tableUri);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+                .withRetention("months(9000)", ActionType.DELETE)
+                .withValidity(startDate, endDate)
+                .withClusterType(ClusterType.TARGET)
+                .withTableUri(tableUri)
+                .build()).toString();
 
         AssertUtil.assertSucceeded(prism.getFeedHelper().submitAndSchedule(feed));
         TimeUtil.sleepSeconds(TIMEOUT);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java
index 63f98f2..1546886 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java
@@ -37,7 +37,6 @@ import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.OozieUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.falcon.resource.APIResult;
 import org.apache.falcon.resource.EntitySummaryResult;
@@ -141,20 +140,22 @@ public class EntitySummaryTest extends BaseTestClass {
         String cluster1Def = bundles[0].getClusters().get(0);
         String cluster2Def = bundles[1].getClusters().get(0);
         //erase all clusters from feed definition
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
         //set cluster1 as source
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(cluster1Def), ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(cluster1Def))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
         //set cluster2 as target
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(cluster2Def), ClusterType.TARGET, null, targetDataLocation);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(cluster2Def))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(targetDataLocation)
+                .build()).toString();
         String clusterName = Util.readEntityName(cluster2Def);
 
         //submit clusters

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiTest.java
index cfe1119..bedb456 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiTest.java
@@ -62,7 +62,7 @@ public class LineageApiTest extends BaseTestClass {
     private static final Logger LOGGER = Logger.getLogger(LineageApiTest.class);
     private static final String TEST_NAME = "LineageApiTest";
     private static final String TEST_TAG =
-        Edge.LEBEL_TYPE.TESTNAME.toString().toLowerCase() + "=" + TEST_NAME;
+        Edge.LabelType.TESTNAME.toString().toLowerCase() + "=" + TEST_NAME;
     private static final String VERTEX_NOT_FOUND_REGEX = ".*Vertex.*%d.*not.*found.*\n?";
     private static final String INVALID_ARGUMENT_STR = "Invalid argument";
     private LineageHelper lineageHelper;
@@ -411,10 +411,10 @@ public class LineageApiTest extends BaseTestClass {
         final EdgesResult bothEdges =
             lineageHelper.getEdgesByDirection(clusterVertexId, Direction.bothEdges);
         GraphAssert.assertEdgeSanity(bothEdges);
-        Assert.assertEquals(bothEdges.filterByType(Edge.LEBEL_TYPE.STORED_IN).size(),
+        Assert.assertEquals(bothEdges.filterByType(Edge.LabelType.STORED_IN).size(),
             inputFeeds.length + outputFeeds.length,
             "There should be edge between the cluster and inputFeeds, outputFeeds");
-        Assert.assertEquals(bothEdges.filterByType(Edge.LEBEL_TYPE.CLUSTER_COLO).size(),
+        Assert.assertEquals(bothEdges.filterByType(Edge.LabelType.CLUSTER_COLO).size(),
             1, "There should be an edge from the cluster to colo");
         Assert.assertEquals(bothEdges.getTotalSize(), inputFeeds.length + outputFeeds.length + 2,
             "There should be edge from the cluster to inputFeeds & outputFeeds,"
@@ -425,7 +425,7 @@ public class LineageApiTest extends BaseTestClass {
         GraphAssert.assertEdgeSanity(inComingEdges);
         Assert.assertEquals(inComingEdges.getTotalSize(), inputFeeds.length + outputFeeds.length,
             "There should be edge from the cluster to inputFeeds & outputFeeds");
-        Assert.assertEquals(inComingEdges.filterByType(Edge.LEBEL_TYPE.STORED_IN).size(),
+        Assert.assertEquals(inComingEdges.filterByType(Edge.LabelType.STORED_IN).size(),
             inputFeeds.length + outputFeeds.length,
             "There should be edge from the cluster to inputFeeds & outputFeeds");
 
@@ -433,9 +433,9 @@ public class LineageApiTest extends BaseTestClass {
         final EdgesResult outGoingEdges =
             lineageHelper.getEdgesByDirection(clusterVertexId, Direction.outGoingEdges);
         GraphAssert.assertEdgeSanity(outGoingEdges);
-        Assert.assertEquals(outGoingEdges.filterByType(Edge.LEBEL_TYPE.CLUSTER_COLO).size(),
+        Assert.assertEquals(outGoingEdges.filterByType(Edge.LabelType.CLUSTER_COLO).size(),
             1, "There should be an edge from the cluster to colo");
-        Assert.assertEquals(outGoingEdges.filterByType(Edge.LEBEL_TYPE.TESTNAME).size(),
+        Assert.assertEquals(outGoingEdges.filterByType(Edge.LabelType.TESTNAME).size(),
             1, "There should be an edge from the cluster to classification");
         Assert.assertEquals(outGoingEdges.getTotalSize(), 2,
             "There should be an edge from the cluster to colo");
@@ -560,10 +560,10 @@ public class LineageApiTest extends BaseTestClass {
         Assert.assertTrue(edgesResult.getTotalSize() > 0, "Total number of edges should be"
             + " greater that zero but is: " + edgesResult.getTotalSize());
         GraphAssert.assertEdgeSanity(edgesResult);
-        GraphAssert.assertEdgePresenceMinOccur(edgesResult, Edge.LEBEL_TYPE.CLUSTER_COLO, 1);
-        GraphAssert.assertEdgePresenceMinOccur(edgesResult, Edge.LEBEL_TYPE.STORED_IN,
+        GraphAssert.assertEdgePresenceMinOccur(edgesResult, Edge.LabelType.CLUSTER_COLO, 1);
+        GraphAssert.assertEdgePresenceMinOccur(edgesResult, Edge.LabelType.STORED_IN,
             numInputFeeds + numOutputFeeds);
-        GraphAssert.assertEdgePresenceMinOccur(edgesResult, Edge.LEBEL_TYPE.OWNED_BY,
+        GraphAssert.assertEdgePresenceMinOccur(edgesResult, Edge.LabelType.OWNED_BY,
             1 + numInputFeeds + numOutputFeeds);
     }
 
@@ -573,11 +573,11 @@ public class LineageApiTest extends BaseTestClass {
         final EdgesResult outGoingEdges =
             lineageHelper.getEdgesByDirection(clusterVertexId, Direction.outGoingEdges);
         GraphAssert.assertEdgeSanity(outGoingEdges);
-        Assert.assertEquals(outGoingEdges.filterByType(Edge.LEBEL_TYPE.CLUSTER_COLO).size(),
+        Assert.assertEquals(outGoingEdges.filterByType(Edge.LabelType.CLUSTER_COLO).size(),
             1, "There should be an edge from the cluster to colo");
 
         final String clusterColoEdgeId =
-            outGoingEdges.filterByType(Edge.LEBEL_TYPE.CLUSTER_COLO).get(0).getId();
+            outGoingEdges.filterByType(Edge.LabelType.CLUSTER_COLO).get(0).getId();
         final Edge clusterColoEdge =
             lineageHelper.getEdgeById(clusterColoEdgeId).getResults();
         GraphAssert.assertEdgeSanity(clusterColoEdge);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/ListFeedInstancesTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/ListFeedInstancesTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/ListFeedInstancesTest.java
index b585665..9a822ee 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/ListFeedInstancesTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/ListFeedInstancesTest.java
@@ -21,6 +21,7 @@ import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.Frequency;
 import org.apache.falcon.entity.v0.feed.ActionType;
 import org.apache.falcon.entity.v0.feed.ClusterType;
+import org.apache.falcon.regression.Entities.FeedMerlin;
 import org.apache.falcon.regression.core.bundle.Bundle;
 import org.apache.falcon.regression.core.helpers.ColoHelper;
 import org.apache.falcon.regression.core.util.AssertUtil;
@@ -30,7 +31,6 @@ import org.apache.falcon.regression.core.util.InstanceUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.falcon.resource.InstancesResult;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
@@ -99,20 +99,22 @@ public class ListFeedInstancesTest extends BaseTestClass {
         String cluster1Def = bundles[0].getClusters().get(0);
         String cluster2Def = bundles[1].getClusters().get(0);
         //erase all clusters from feed definition
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
         //set cluster1 as source
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(cluster1Def), ClusterType.SOURCE, null);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(cluster1Def))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.SOURCE)
+                .build()).toString();
         //set cluster2 as target
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity(startTime, endTime),
-            XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
-            Util.readEntityName(cluster2Def), ClusterType.TARGET, null, targetDataLocation);
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(cluster2Def))
+                .withRetention("days(1000000)", ActionType.DELETE)
+                .withValidity(startTime, endTime)
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(targetDataLocation)
+                .build()).toString();
 
         //submit clusters
         AssertUtil.assertSucceeded(prism.getClusterHelper().submitEntity(cluster1Def));

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/FeedDelayParallelTimeoutTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/FeedDelayParallelTimeoutTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/FeedDelayParallelTimeoutTest.java
index 4ffc64f..f751119 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/FeedDelayParallelTimeoutTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/FeedDelayParallelTimeoutTest.java
@@ -18,15 +18,12 @@
 
 package org.apache.falcon.regression.prism;
 
+import org.apache.falcon.regression.Entities.FeedMerlin;
 import org.apache.falcon.regression.core.bundle.Bundle;
-import org.apache.falcon.entity.v0.feed.ActionType;
-import org.apache.falcon.entity.v0.feed.ClusterType;
 import org.apache.falcon.regression.core.helpers.ColoHelper;
 import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.core.util.InstanceUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.log4j.Logger;
 import org.testng.annotations.AfterClass;
@@ -85,11 +82,7 @@ public class FeedDelayParallelTimeoutTest extends BaseTestClass {
             new org.apache.falcon.entity.v0.Frequency(
                 "hours(5)");
 
-        feedOutput01 = InstanceUtil
-            .setFeedCluster(feedOutput01,
-                XmlUtil.createValidity("2010-10-01T12:00Z", "2099-01-01T00:00Z"),
-                XmlUtil.createRetention("days(10000)", ActionType.DELETE), null,
-                ClusterType.SOURCE, null);
+        feedOutput01 = FeedMerlin.fromString(feedOutput01).clearFeedClusters().toString();
 
         // uncomment below 2 line when falcon in sync with ivory
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedDeleteTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedDeleteTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedDeleteTest.java
index 1d3f88d..33fea84 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedDeleteTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedDeleteTest.java
@@ -19,6 +19,7 @@
 package org.apache.falcon.regression.prism;
 
 
+import org.apache.falcon.regression.Entities.FeedMerlin;
 import org.apache.falcon.regression.core.bundle.Bundle;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.feed.ActionType;
@@ -28,11 +29,9 @@ import org.apache.falcon.regression.core.interfaces.IEntityManagerHelper;
 import org.apache.falcon.regression.core.response.ServiceResponse;
 import org.apache.falcon.regression.core.util.AssertUtil;
 import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.core.util.InstanceUtil;
 import org.apache.falcon.regression.core.util.OSUtil;
 import org.apache.falcon.regression.core.util.TimeUtil;
 import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
 import org.apache.falcon.regression.testHelper.BaseTestClass;
 import org.apache.log4j.Logger;
 import org.testng.Assert;
@@ -383,22 +382,26 @@ public class PrismFeedDeleteTest extends BaseTestClass {
         String startTimeServer2 = "2012-10-01T12:00Z";
 
         String feed = bundles[0].getDataSets().get(0);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(10000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeServer1, "2099-10-01T12:10Z"),
-                XmlUtil.createRetention("days(10000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE,
-                "${cluster.colo}", baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeServer2, "2099-10-01T12:25Z"),
-                XmlUtil.createRetention("days(10000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
-                baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(10000)", ActionType.DELETE)
+                .withValidity(startTimeServer1, "2099-10-01T12:10Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("${cluster.colo}")
+                .withDataLocation(baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+                .build())
+            .toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(10000)", ActionType.DELETE)
+                .withValidity(startTimeServer2, "2099-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(
+                    baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+                .build()).toString();
 
         Util.shutDownService(cluster1.getFeedHelper());
 
@@ -802,22 +805,26 @@ public class PrismFeedDeleteTest extends BaseTestClass {
         String startTimeServer2 = "2012-10-01T12:00Z";
 
         String feed = bundles[0].getDataSets().get(0);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(10000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeServer1, "2099-10-01T12:10Z"),
-                XmlUtil.createRetention("days(10000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE,
-                "${cluster.colo}", baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN);
-
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeServer2, "2099-10-01T12:25Z"),
-                XmlUtil.createRetention("days(10000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
-                baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(10000)", ActionType.DELETE)
+                .withValidity(startTimeServer1, "2099-10-01T12:10Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("${cluster.colo}")
+                .withDataLocation(baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+                .build())
+            .toString();
+
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(10000)", ActionType.DELETE)
+                .withValidity(startTimeServer2, "2099-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(
+                    baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+                .build()).toString();
 
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 
@@ -903,20 +910,24 @@ public class PrismFeedDeleteTest extends BaseTestClass {
         String startTimeServer2 = "2012-10-01T12:00Z";
 
         String feed = bundles[0].getDataSets().get(0);
-        feed = InstanceUtil.setFeedCluster(feed,
-            XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
-            XmlUtil.createRetention("days(10000)", ActionType.DELETE), null,
-            ClusterType.SOURCE, null);
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeServer1, "2099-10-01T12:10Z"),
-                XmlUtil.createRetention("days(10000)", ActionType.DELETE),
-                Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE,
-                "${cluster.colo}", baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN);
-        feed = InstanceUtil
-            .setFeedCluster(feed, XmlUtil.createValidity(startTimeServer2, "2099-10-01T12:25Z"),
-                XmlUtil.createRetention("days(10000)", ActionType.DELETE),
-                Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
-                baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN);
+        feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+                .withRetention("days(10000)", ActionType.DELETE)
+                .withValidity(startTimeServer1, "2099-10-01T12:10Z")
+                .withClusterType(ClusterType.SOURCE)
+                .withPartition("${cluster.colo}")
+                .withDataLocation(baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+                .build())
+            .toString();
+        feed = FeedMerlin.fromString(feed).addFeedCluster(
+            new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+                .withRetention("days(10000)", ActionType.DELETE)
+                .withValidity(startTimeServer2, "2099-10-01T12:25Z")
+                .withClusterType(ClusterType.TARGET)
+                .withDataLocation(
+                    baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+                .build()).toString();
 
         LOGGER.info("feed: " + Util.prettyPrintXml(feed));
 
@@ -946,8 +957,8 @@ public class PrismFeedDeleteTest extends BaseTestClass {
         Util.shutDownService(cluster1.getFeedHelper());
 
         ServiceResponse response = prism.getFeedHelper().delete(feed);
-        Assert.assertTrue(response.getMessage().contains(cluster1Colo + "/org.apache.falcon"
-            + ".FalconException")
+        Assert.assertTrue(
+            response.getMessage().contains(cluster1Colo + "/org.apache.falcon.FalconException")
             && response.getMessage().contains(cluster2Colo + "/" + Util.readEntityName(feed)));
         AssertUtil.assertPartial(response);