You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@falcon.apache.org by ra...@apache.org on 2014/12/02 19:48:59 UTC
[2/3] incubator-falcon git commit: FALCON-928 Use falcon's checkstyle
for falcon regression code. Contributed by Raghav Kumar Gautam
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java
index 650d4c3..dfd5d0b 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedInstanceStatusTest.java
@@ -18,6 +18,7 @@
package org.apache.falcon.regression;
+import org.apache.falcon.regression.Entities.FeedMerlin;
import org.apache.falcon.regression.core.bundle.Bundle;
import org.apache.falcon.entity.v0.feed.ActionType;
import org.apache.falcon.entity.v0.feed.ClusterType;
@@ -25,11 +26,9 @@ import org.apache.falcon.regression.core.helpers.ColoHelper;
import org.apache.falcon.regression.core.util.AssertUtil;
import org.apache.falcon.regression.core.util.BundleUtil;
import org.apache.falcon.regression.core.util.HadoopUtil;
-import org.apache.falcon.regression.core.util.InstanceUtil;
import org.apache.falcon.regression.core.util.OSUtil;
import org.apache.falcon.regression.core.util.TimeUtil;
import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.falcon.resource.InstancesResult;
import org.apache.hadoop.fs.FileSystem;
@@ -82,7 +81,7 @@ public class FeedInstanceStatusTest extends BaseTestClass {
}
/**
- * Goes through the whole feed replication workflow checking its instances status while
+ * Goes through the whole feed replication workflow checking its instances status while.
* submitting feed, scheduling it, performing different combinations of actions like
* -submit, -resume, -kill, -rerun.
*/
@@ -90,72 +89,77 @@ public class FeedInstanceStatusTest extends BaseTestClass {
public void feedInstanceStatusRunning() throws Exception {
bundles[0].setInputFeedDataPath(feedInputPath);
- LOGGER.info("cluster bundle1: " + Util.prettyPrintXml(bundles[0].getClusters().get(0)));
AssertUtil.assertSucceeded(prism.getClusterHelper()
.submitEntity(bundles[0].getClusters().get(0)));
- LOGGER.info("cluster bundle2: " + Util.prettyPrintXml(bundles[1].getClusters().get(0)));
AssertUtil.assertSucceeded(prism.getClusterHelper()
.submitEntity(bundles[1].getClusters().get(0)));
- LOGGER.info("cluster bundle3: " + Util.prettyPrintXml(bundles[2].getClusters().get(0)));
AssertUtil.assertSucceeded(prism.getClusterHelper()
.submitEntity(bundles[2].getClusters().get(0)));
String feed = bundles[0].getDataSets().get(0);
String feedName = Util.readEntityName(feed);
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity("2009-02-01T00:00Z", "2012-01-01T00:00Z"),
- XmlUtil.createRetention("hours(10)", ActionType.DELETE), null,
- ClusterType.SOURCE, null);
+ feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
String startTime = TimeUtil.getTimeWrtSystemTime(-50);
-
- feed = InstanceUtil.setFeedCluster(feed, XmlUtil.createValidity(startTime,
- TimeUtil.addMinsToTime(startTime, 65)),
- XmlUtil.createRetention("hours(10)", ActionType.DELETE),
- Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
- "US/${cluster.colo}");
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 20),
- TimeUtil.addMinsToTime(startTime, 85)),
- XmlUtil.createRetention("hours(10)", ActionType.DELETE),
- Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET, null);
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(TimeUtil.addMinsToTime(startTime, 40),
- TimeUtil.addMinsToTime(startTime, 110)),
- XmlUtil.createRetention("hours(10)", ActionType.DELETE),
- Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.SOURCE,
- "UK/${cluster.colo}");
+ final String startPlus20Min = TimeUtil.addMinsToTime(startTime, 20);
+ final String startPlus40Min = TimeUtil.addMinsToTime(startTime, 40);
+ final String startPlus100Min = TimeUtil.addMinsToTime(startTime, 100);
+
+ feed = FeedMerlin.fromString(feed)
+ .addFeedCluster(new FeedMerlin.FeedClusterBuilder(
+ Util.readEntityName(bundles[1].getClusters().get(0)))
+ .withRetention("hours(10)", ActionType.DELETE)
+ .withValidity(startTime, TimeUtil.addMinsToTime(startTime, 65))
+ .withClusterType(ClusterType.SOURCE)
+ .withPartition("US/${cluster.colo}")
+ .build())
+ .toString();
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+ .withRetention("hours(10)", ActionType.DELETE)
+ .withValidity(startPlus20Min,
+ TimeUtil.addMinsToTime(startTime, 85))
+ .withClusterType(ClusterType.TARGET)
+ .build())
+ .toString();
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+ .withRetention("hours(10)", ActionType.DELETE)
+ .withValidity(startPlus40Min,
+ TimeUtil.addMinsToTime(startTime, 110))
+ .withClusterType(ClusterType.SOURCE)
+ .withPartition("UK/${cluster.colo}")
+ .build())
+ .toString();
LOGGER.info("feed: " + Util.prettyPrintXml(feed));
//status before submit
- prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 100)
+ prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus100Min
+ "&end=" + TimeUtil.addMinsToTime(startTime, 120));
AssertUtil.assertSucceeded(prism.getFeedHelper().submitEntity(feed));
prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(startTime, 100));
+ "?start=" + startTime + "&end=" + startPlus100Min);
AssertUtil.assertSucceeded(prism.getFeedHelper().schedule(feed));
// both replication instances
prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(startTime, 100));
+ "?start=" + startTime + "&end=" + startPlus100Min);
// single instance at -30
- prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 20));
+ prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus20Min);
+
//single at -10
- prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+ prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus40Min);
+
//single at 10
- prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+ prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus40Min);
+
//single at 30
- prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+ prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus40Min);
String postFix = "/US/" + cluster2.getClusterHelper().getColoName();
String prefix = bundles[0].getFeedDataPathPrefix();
@@ -169,51 +173,46 @@ public class FeedInstanceStatusTest extends BaseTestClass {
// both replication instances
prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(startTime, 100));
+ "?start=" + startTime + "&end=" + startPlus100Min);
// single instance at -30
- prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 20));
+ prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus20Min);
+
//single at -10
- prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+ prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus40Min);
+
//single at 10
- prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+ prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus40Min);
+
//single at 30
- prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+ prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startPlus40Min);
LOGGER.info("Wait till feed goes into running ");
//suspend instances -10
- prism.getFeedHelper().getProcessInstanceSuspend(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 40));
+ prism.getFeedHelper().getProcessInstanceSuspend(feedName, "?start=" + startPlus40Min);
prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 20)
- + "&end=" + TimeUtil.addMinsToTime(startTime, 40));
+ "?start=" + startPlus20Min + "&end=" + startPlus40Min);
//resuspend -10 and suspend -30 source specific
prism.getFeedHelper().getProcessInstanceSuspend(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 20)
- + "&end=" + TimeUtil.addMinsToTime(startTime, 40));
+ "?start=" + startPlus20Min + "&end=" + startPlus40Min);
prism.getFeedHelper().getProcessInstanceStatus(feedName,
- "?start=" + TimeUtil.addMinsToTime(startTime, 20)
- + "&end=" + TimeUtil.addMinsToTime(startTime, 40));
+ "?start=" + startPlus20Min + "&end=" + startPlus40Min);
//resume -10 and -30
- prism.getFeedHelper().getProcessInstanceResume(feedName, "?start=" + TimeUtil
- .addMinsToTime(startTime, 20) + "&end=" + TimeUtil.addMinsToTime(startTime, 40));
- prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + TimeUtil
- .addMinsToTime(startTime, 20) + "&end=" + TimeUtil.addMinsToTime(startTime, 40));
+ prism.getFeedHelper().getProcessInstanceResume(feedName,
+ "?start=" + startPlus20Min + "&end=" + startPlus40Min);
+ prism.getFeedHelper().getProcessInstanceStatus(feedName,
+ "?start=" + startPlus20Min + "&end=" + startPlus40Min);
//get running instances
prism.getFeedHelper().getRunningInstance(feedName);
//rerun succeeded instance
prism.getFeedHelper().getProcessInstanceRerun(feedName, "?start=" + startTime);
- prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startTime
- + "&end=" + TimeUtil.addMinsToTime(startTime, 20));
+ prism.getFeedHelper().getProcessInstanceStatus(feedName,
+ "?start=" + startTime + "&end=" + startPlus20Min);
//kill instance
prism.getFeedHelper().getProcessInstanceKill(feedName,
@@ -226,8 +225,8 @@ public class FeedInstanceStatusTest extends BaseTestClass {
//rerun killed instance
prism.getFeedHelper().getProcessInstanceRerun(feedName, "?start=" + startTime);
- prism.getFeedHelper().getProcessInstanceStatus(feedName, "?start=" + startTime
- + "&end=" + TimeUtil.addMinsToTime(startTime, 110));
+ prism.getFeedHelper().getProcessInstanceStatus(feedName,
+ "?start=" + startTime + "&end=" + TimeUtil.addMinsToTime(startTime, 110));
//kill feed
prism.getFeedHelper().delete(feed);
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedReplicationTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedReplicationTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedReplicationTest.java
index b7afad4..6c61a4a 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedReplicationTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/FeedReplicationTest.java
@@ -31,7 +31,6 @@ import org.apache.falcon.regression.core.util.InstanceUtil;
import org.apache.falcon.regression.core.util.OSUtil;
import org.apache.falcon.regression.core.util.TimeUtil;
import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.falcon.resource.InstancesResult;
import org.apache.hadoop.fs.FileSystem;
@@ -115,22 +114,22 @@ public class FeedReplicationTest extends BaseTestClass {
String feed = bundles[0].getDataSets().get(0);
feed = InstanceUtil.setFeedFilePath(feed, feedDataLocation);
//erase all clusters from feed definition
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
- ClusterType.SOURCE, null);
+ feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
//set cluster1 as source
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(startTime, endTime),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
- Util.readEntityName(bundles[0].getClusters().get(0)),
- ClusterType.SOURCE, null);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+ .withRetention("days(1000000)", ActionType.DELETE)
+ .withValidity(startTime, endTime)
+ .withClusterType(ClusterType.SOURCE)
+ .build()).toString();
//set cluster2 as target
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(startTime, endTime),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
- Util.readEntityName(bundles[1].getClusters().get(0)),
- ClusterType.TARGET, null, targetDataLocation);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+ .withRetention("days(1000000)", ActionType.DELETE)
+ .withValidity(startTime, endTime)
+ .withClusterType(ClusterType.TARGET)
+ .withDataLocation(targetDataLocation)
+ .build()).toString();
//submit and schedule feed
LOGGER.info("Feed : " + Util.prettyPrintXml(feed));
@@ -186,28 +185,30 @@ public class FeedReplicationTest extends BaseTestClass {
String feed = bundles[0].getDataSets().get(0);
feed = InstanceUtil.setFeedFilePath(feed, feedDataLocation);
//erase all clusters from feed definition
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
- ClusterType.SOURCE, null);
+ feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
//set cluster1 as source
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(startTime, endTime),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
- Util.readEntityName(bundles[0].getClusters().get(0)),
- ClusterType.SOURCE, null);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+ .withRetention("days(1000000)", ActionType.DELETE)
+ .withValidity(startTime, endTime)
+ .withClusterType(ClusterType.SOURCE)
+ .build()).toString();
//set cluster2 as target
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(startTime, endTime),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
- Util.readEntityName(bundles[1].getClusters().get(0)),
- ClusterType.TARGET, null, targetDataLocation);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+ .withRetention("days(1000000)", ActionType.DELETE)
+ .withValidity(startTime, endTime)
+ .withClusterType(ClusterType.TARGET)
+ .withDataLocation(targetDataLocation)
+ .build()).toString();
//set cluster3 as target
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(startTime, endTime),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
- Util.readEntityName(bundles[2].getClusters().get(0)),
- ClusterType.TARGET, null, targetDataLocation);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+ .withRetention("days(1000000)", ActionType.DELETE)
+ .withValidity(startTime, endTime)
+ .withClusterType(ClusterType.TARGET)
+ .withDataLocation(targetDataLocation)
+ .build()).toString();
//submit and schedule feed
LOGGER.info("Feed : " + Util.prettyPrintXml(feed));
@@ -282,22 +283,22 @@ public class FeedReplicationTest extends BaseTestClass {
String feed = bundles[0].getDataSets().get(0);
feed = InstanceUtil.setFeedFilePath(feed, feedDataLocation);
//erase all clusters from feed definition
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
- ClusterType.SOURCE, null);
+ feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
//set cluster1 as source
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(startTime, endTime),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
- Util.readEntityName(bundles[0].getClusters().get(0)),
- ClusterType.SOURCE, null);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+ .withRetention("days(1000000)", ActionType.DELETE)
+ .withValidity(startTime, endTime)
+ .withClusterType(ClusterType.SOURCE)
+ .build()).toString();
//set cluster2 as target
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(startTime, endTime),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
- Util.readEntityName(bundles[1].getClusters().get(0)),
- ClusterType.TARGET, null, targetDataLocation);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+ .withRetention("days(1000000)", ActionType.DELETE)
+ .withValidity(startTime, endTime)
+ .withClusterType(ClusterType.TARGET)
+ .withDataLocation(targetDataLocation)
+ .build()).toString();
//submit and schedule feed
LOGGER.info("Feed : " + Util.prettyPrintXml(feed));
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java
index 8395476..da35e4c 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/InstanceSummaryTest.java
@@ -18,6 +18,7 @@
package org.apache.falcon.regression;
+import org.apache.falcon.regression.Entities.FeedMerlin;
import org.apache.falcon.regression.core.bundle.Bundle;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.feed.ActionType;
@@ -29,7 +30,6 @@ import org.apache.falcon.regression.core.util.InstanceUtil;
import org.apache.falcon.regression.core.util.OSUtil;
import org.apache.falcon.regression.core.util.TimeUtil;
import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.falcon.resource.InstancesSummaryResult;
import org.apache.hadoop.fs.FileSystem;
@@ -222,27 +222,29 @@ public class InstanceSummaryTest extends BaseTestClass {
String feed = bundles[0].getDataSets().get(0);
//cluster_1 is target, cluster_2 is source and cluster_3 is neutral
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
- XmlUtil.createRetention("days(100000)", ActionType.DELETE), null,
- ClusterType.SOURCE, null);
-
- feed = InstanceUtil
- .setFeedCluster(feed, XmlUtil.createValidity(startTime, "2099-10-01T12:10Z"),
- XmlUtil.createRetention("days(100000)", ActionType.DELETE),
- Util.readEntityName(bundles[2].getClusters().get(0)), null, null);
-
- feed = InstanceUtil
- .setFeedCluster(feed, XmlUtil.createValidity(startTime, "2099-10-01T12:25Z"),
- XmlUtil.createRetention("days(100000)", ActionType.DELETE),
- Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.TARGET,
- null, feedInputPath);
-
- feed = InstanceUtil
- .setFeedCluster(feed, XmlUtil.createValidity(startTime, "2099-01-01T00:00Z"),
- XmlUtil.createRetention("days(100000)", ActionType.DELETE),
- Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.SOURCE,
- null, feedInputPath);
+ feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+ .withRetention("days(100000)", ActionType.DELETE)
+ .withValidity(startTime, "2099-10-01T12:10Z")
+ .build()).toString();
+
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+ .withRetention("days(100000)", ActionType.DELETE)
+ .withValidity(startTime, "2099-10-01T12:25Z")
+ .withClusterType(ClusterType.TARGET)
+ .withDataLocation(feedInputPath)
+ .build()).toString();
+
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+ .withRetention("days(100000)", ActionType.DELETE)
+ .withValidity(startTime, "2099-01-01T00:00Z")
+ .withClusterType(ClusterType.SOURCE)
+ .withDataLocation(feedInputPath)
+ .build()).toString();
//submit clusters
Bundle.submitCluster(bundles[0], bundles[1], bundles[2]);
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java
index b1a2393..645d63b 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/TestngListener.java
@@ -40,7 +40,8 @@ import java.io.IOException;
import java.util.Arrays;
/**
- * A listener for test running.
+ * Testng listener class. This is useful for things that are applicable to all the tests as well
+ as taking actions that depend on test results.
*/
public class TestngListener implements ITestListener, IExecutionListener {
private static final Logger LOGGER = Logger.getLogger(TestngListener.class);
@@ -72,8 +73,8 @@ public class TestngListener implements ITestListener, IExecutionListener {
public void onTestFailure(ITestResult result) {
logEndOfTest(result, "FAILED");
if (BaseUITestClass.getDriver() != null) {
- byte[] scrFile = ((TakesScreenshot)BaseUITestClass.getDriver())
- .getScreenshotAs(OutputType.BYTES);
+ byte[] scrFile =
+ ((TakesScreenshot)BaseUITestClass.getDriver()).getScreenshotAs(OutputType.BYTES);
try {
String filename = OSUtil.getPath("target", "surefire-reports", "screenshots", String.format("%s.%s.png",
result.getTestClass().getRealClass().getSimpleName(), result.getName()));
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java
index 2a4a9c1..b24abe3 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatFeedOperationsTest.java
@@ -34,7 +34,6 @@ import org.apache.falcon.regression.core.util.HCatUtil;
import org.apache.falcon.regression.core.util.OSUtil;
import org.apache.falcon.regression.core.util.Util;
import org.apache.falcon.regression.core.util.InstanceUtil;
-import org.apache.falcon.regression.core.util.XmlUtil;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.hive.hcatalog.api.HCatClient;
import org.apache.hive.hcatalog.api.HCatCreateTableDesc;
@@ -177,11 +176,13 @@ public class HCatFeedOperationsTest extends BaseTestClass {
feed = bundles[0].getDataSets().get(0);
// set cluster 2 as the target.
- feed = InstanceUtil.setFeedClusterWithTable(feed,
- XmlUtil.createValidity(startDate, endDate),
- XmlUtil.createRetention("months(9000)", ActionType.DELETE),
- Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
- tableUri);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+ .withRetention("months(9000)", ActionType.DELETE)
+ .withValidity(startDate, endDate)
+ .withClusterType(ClusterType.TARGET)
+ .withTableUri(tableUri)
+ .build()).toString();
AssertUtil.assertPartial(prism.getFeedHelper().submitAndSchedule(feed));
}
@@ -206,11 +207,13 @@ public class HCatFeedOperationsTest extends BaseTestClass {
feed = bundles[0].getDataSets().get(0);
// set cluster 2 as the target.
- feed = InstanceUtil.setFeedClusterWithTable(feed,
- XmlUtil.createValidity(startDate, endDate),
- XmlUtil.createRetention("months(9000)", ActionType.DELETE),
- Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
- tableUri);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+ .withRetention("months(9000)", ActionType.DELETE)
+ .withValidity(startDate, endDate)
+ .withClusterType(ClusterType.TARGET)
+ .withTableUri(tableUri)
+ .build()).toString();
AssertUtil.assertSucceeded(prism.getFeedHelper().submitAndSchedule(feed));
Assert.assertEquals(InstanceUtil
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java
index af1a751..76c9078 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hcat/HCatReplicationTest.java
@@ -19,6 +19,7 @@
package org.apache.falcon.regression.hcat;
import org.apache.commons.lang.StringUtils;
+import org.apache.falcon.regression.Entities.FeedMerlin;
import org.apache.falcon.regression.core.bundle.Bundle;
import org.apache.falcon.entity.v0.cluster.Interfacetype;
import org.apache.falcon.entity.v0.EntityType;
@@ -34,7 +35,6 @@ import org.apache.falcon.regression.core.util.InstanceUtil;
import org.apache.falcon.regression.core.util.OSUtil;
import org.apache.falcon.regression.core.util.TimeUtil;
import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -179,11 +179,13 @@ public class HCatReplicationTest extends BaseTestClass {
String feed = bundles[0].getDataSets().get(0);
// set the cluster 2 as the target.
- feed = InstanceUtil.setFeedClusterWithTable(feed,
- XmlUtil.createValidity(startDate, endDate),
- XmlUtil.createRetention("months(9000)", ActionType.DELETE),
- Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
- tableUri);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+ .withRetention("months(9000)", ActionType.DELETE)
+ .withValidity(startDate, endDate)
+ .withClusterType(ClusterType.TARGET)
+ .withTableUri(tableUri)
+ .build()).toString();
AssertUtil.assertSucceeded(prism.getFeedHelper().submitAndSchedule(feed));
TimeUtil.sleepSeconds(TIMEOUT);
@@ -265,17 +267,21 @@ public class HCatReplicationTest extends BaseTestClass {
String feed = bundles[0].getDataSets().get(0);
// set the cluster 2 as the target.
- feed = InstanceUtil.setFeedClusterWithTable(feed,
- XmlUtil.createValidity(startDate, endDate),
- XmlUtil.createRetention("months(9000)", ActionType.DELETE),
- Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
- tableUri);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+ .withRetention("months(9000)", ActionType.DELETE)
+ .withValidity(startDate, endDate)
+ .withClusterType(ClusterType.TARGET)
+ .withTableUri(tableUri)
+ .build()).toString();
// set the cluster 3 as the target.
- feed = InstanceUtil.setFeedClusterWithTable(feed,
- XmlUtil.createValidity(startDate, endDate),
- XmlUtil.createRetention("months(9000)", ActionType.DELETE),
- Util.readEntityName(bundles[2].getClusters().get(0)), ClusterType.TARGET, null,
- tableUri);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[2].getClusters().get(0)))
+ .withRetention("months(9000)", ActionType.DELETE)
+ .withValidity(startDate, endDate)
+ .withClusterType(ClusterType.TARGET)
+ .withTableUri(tableUri)
+ .build()).toString();
AssertUtil.assertSucceeded(prism.getFeedHelper().submitAndSchedule(feed));
TimeUtil.sleepSeconds(TIMEOUT);
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java
index 63f98f2..1546886 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/EntitySummaryTest.java
@@ -37,7 +37,6 @@ import org.apache.falcon.regression.core.util.OSUtil;
import org.apache.falcon.regression.core.util.OozieUtil;
import org.apache.falcon.regression.core.util.TimeUtil;
import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.falcon.resource.APIResult;
import org.apache.falcon.resource.EntitySummaryResult;
@@ -141,20 +140,22 @@ public class EntitySummaryTest extends BaseTestClass {
String cluster1Def = bundles[0].getClusters().get(0);
String cluster2Def = bundles[1].getClusters().get(0);
//erase all clusters from feed definition
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
- ClusterType.SOURCE, null);
+ feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
//set cluster1 as source
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(startTime, endTime),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
- Util.readEntityName(cluster1Def), ClusterType.SOURCE, null);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(cluster1Def))
+ .withRetention("days(1000000)", ActionType.DELETE)
+ .withValidity(startTime, endTime)
+ .withClusterType(ClusterType.SOURCE)
+ .build()).toString();
//set cluster2 as target
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(startTime, endTime),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
- Util.readEntityName(cluster2Def), ClusterType.TARGET, null, targetDataLocation);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(cluster2Def))
+ .withRetention("days(1000000)", ActionType.DELETE)
+ .withValidity(startTime, endTime)
+ .withClusterType(ClusterType.TARGET)
+ .withDataLocation(targetDataLocation)
+ .build()).toString();
String clusterName = Util.readEntityName(cluster2Def);
//submit clusters
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiTest.java
index cfe1119..bedb456 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/LineageApiTest.java
@@ -62,7 +62,7 @@ public class LineageApiTest extends BaseTestClass {
private static final Logger LOGGER = Logger.getLogger(LineageApiTest.class);
private static final String TEST_NAME = "LineageApiTest";
private static final String TEST_TAG =
- Edge.LEBEL_TYPE.TESTNAME.toString().toLowerCase() + "=" + TEST_NAME;
+ Edge.LabelType.TESTNAME.toString().toLowerCase() + "=" + TEST_NAME;
private static final String VERTEX_NOT_FOUND_REGEX = ".*Vertex.*%d.*not.*found.*\n?";
private static final String INVALID_ARGUMENT_STR = "Invalid argument";
private LineageHelper lineageHelper;
@@ -411,10 +411,10 @@ public class LineageApiTest extends BaseTestClass {
final EdgesResult bothEdges =
lineageHelper.getEdgesByDirection(clusterVertexId, Direction.bothEdges);
GraphAssert.assertEdgeSanity(bothEdges);
- Assert.assertEquals(bothEdges.filterByType(Edge.LEBEL_TYPE.STORED_IN).size(),
+ Assert.assertEquals(bothEdges.filterByType(Edge.LabelType.STORED_IN).size(),
inputFeeds.length + outputFeeds.length,
"There should be edge between the cluster and inputFeeds, outputFeeds");
- Assert.assertEquals(bothEdges.filterByType(Edge.LEBEL_TYPE.CLUSTER_COLO).size(),
+ Assert.assertEquals(bothEdges.filterByType(Edge.LabelType.CLUSTER_COLO).size(),
1, "There should be an edge from the cluster to colo");
Assert.assertEquals(bothEdges.getTotalSize(), inputFeeds.length + outputFeeds.length + 2,
"There should be edge from the cluster to inputFeeds & outputFeeds,"
@@ -425,7 +425,7 @@ public class LineageApiTest extends BaseTestClass {
GraphAssert.assertEdgeSanity(inComingEdges);
Assert.assertEquals(inComingEdges.getTotalSize(), inputFeeds.length + outputFeeds.length,
"There should be edge from the cluster to inputFeeds & outputFeeds");
- Assert.assertEquals(inComingEdges.filterByType(Edge.LEBEL_TYPE.STORED_IN).size(),
+ Assert.assertEquals(inComingEdges.filterByType(Edge.LabelType.STORED_IN).size(),
inputFeeds.length + outputFeeds.length,
"There should be edge from the cluster to inputFeeds & outputFeeds");
@@ -433,9 +433,9 @@ public class LineageApiTest extends BaseTestClass {
final EdgesResult outGoingEdges =
lineageHelper.getEdgesByDirection(clusterVertexId, Direction.outGoingEdges);
GraphAssert.assertEdgeSanity(outGoingEdges);
- Assert.assertEquals(outGoingEdges.filterByType(Edge.LEBEL_TYPE.CLUSTER_COLO).size(),
+ Assert.assertEquals(outGoingEdges.filterByType(Edge.LabelType.CLUSTER_COLO).size(),
1, "There should be an edge from the cluster to colo");
- Assert.assertEquals(outGoingEdges.filterByType(Edge.LEBEL_TYPE.TESTNAME).size(),
+ Assert.assertEquals(outGoingEdges.filterByType(Edge.LabelType.TESTNAME).size(),
1, "There should be an edge from the cluster to classification");
Assert.assertEquals(outGoingEdges.getTotalSize(), 2,
"There should be an edge from the cluster to colo");
@@ -560,10 +560,10 @@ public class LineageApiTest extends BaseTestClass {
Assert.assertTrue(edgesResult.getTotalSize() > 0, "Total number of edges should be"
+ " greater that zero but is: " + edgesResult.getTotalSize());
GraphAssert.assertEdgeSanity(edgesResult);
- GraphAssert.assertEdgePresenceMinOccur(edgesResult, Edge.LEBEL_TYPE.CLUSTER_COLO, 1);
- GraphAssert.assertEdgePresenceMinOccur(edgesResult, Edge.LEBEL_TYPE.STORED_IN,
+ GraphAssert.assertEdgePresenceMinOccur(edgesResult, Edge.LabelType.CLUSTER_COLO, 1);
+ GraphAssert.assertEdgePresenceMinOccur(edgesResult, Edge.LabelType.STORED_IN,
numInputFeeds + numOutputFeeds);
- GraphAssert.assertEdgePresenceMinOccur(edgesResult, Edge.LEBEL_TYPE.OWNED_BY,
+ GraphAssert.assertEdgePresenceMinOccur(edgesResult, Edge.LabelType.OWNED_BY,
1 + numInputFeeds + numOutputFeeds);
}
@@ -573,11 +573,11 @@ public class LineageApiTest extends BaseTestClass {
final EdgesResult outGoingEdges =
lineageHelper.getEdgesByDirection(clusterVertexId, Direction.outGoingEdges);
GraphAssert.assertEdgeSanity(outGoingEdges);
- Assert.assertEquals(outGoingEdges.filterByType(Edge.LEBEL_TYPE.CLUSTER_COLO).size(),
+ Assert.assertEquals(outGoingEdges.filterByType(Edge.LabelType.CLUSTER_COLO).size(),
1, "There should be an edge from the cluster to colo");
final String clusterColoEdgeId =
- outGoingEdges.filterByType(Edge.LEBEL_TYPE.CLUSTER_COLO).get(0).getId();
+ outGoingEdges.filterByType(Edge.LabelType.CLUSTER_COLO).get(0).getId();
final Edge clusterColoEdge =
lineageHelper.getEdgeById(clusterColoEdgeId).getResults();
GraphAssert.assertEdgeSanity(clusterColoEdge);
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/ListFeedInstancesTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/ListFeedInstancesTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/ListFeedInstancesTest.java
index b585665..9a822ee 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/ListFeedInstancesTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/lineage/ListFeedInstancesTest.java
@@ -21,6 +21,7 @@ import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.Frequency;
import org.apache.falcon.entity.v0.feed.ActionType;
import org.apache.falcon.entity.v0.feed.ClusterType;
+import org.apache.falcon.regression.Entities.FeedMerlin;
import org.apache.falcon.regression.core.bundle.Bundle;
import org.apache.falcon.regression.core.helpers.ColoHelper;
import org.apache.falcon.regression.core.util.AssertUtil;
@@ -30,7 +31,6 @@ import org.apache.falcon.regression.core.util.InstanceUtil;
import org.apache.falcon.regression.core.util.OSUtil;
import org.apache.falcon.regression.core.util.TimeUtil;
import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.falcon.resource.InstancesResult;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
@@ -99,20 +99,22 @@ public class ListFeedInstancesTest extends BaseTestClass {
String cluster1Def = bundles[0].getClusters().get(0);
String cluster2Def = bundles[1].getClusters().get(0);
//erase all clusters from feed definition
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE), null,
- ClusterType.SOURCE, null);
+ feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
//set cluster1 as source
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(startTime, endTime),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
- Util.readEntityName(cluster1Def), ClusterType.SOURCE, null);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(cluster1Def))
+ .withRetention("days(1000000)", ActionType.DELETE)
+ .withValidity(startTime, endTime)
+ .withClusterType(ClusterType.SOURCE)
+ .build()).toString();
//set cluster2 as target
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity(startTime, endTime),
- XmlUtil.createRetention("days(1000000)", ActionType.DELETE),
- Util.readEntityName(cluster2Def), ClusterType.TARGET, null, targetDataLocation);
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(cluster2Def))
+ .withRetention("days(1000000)", ActionType.DELETE)
+ .withValidity(startTime, endTime)
+ .withClusterType(ClusterType.TARGET)
+ .withDataLocation(targetDataLocation)
+ .build()).toString();
//submit clusters
AssertUtil.assertSucceeded(prism.getClusterHelper().submitEntity(cluster1Def));
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/FeedDelayParallelTimeoutTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/FeedDelayParallelTimeoutTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/FeedDelayParallelTimeoutTest.java
index 4ffc64f..f751119 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/FeedDelayParallelTimeoutTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/FeedDelayParallelTimeoutTest.java
@@ -18,15 +18,12 @@
package org.apache.falcon.regression.prism;
+import org.apache.falcon.regression.Entities.FeedMerlin;
import org.apache.falcon.regression.core.bundle.Bundle;
-import org.apache.falcon.entity.v0.feed.ActionType;
-import org.apache.falcon.entity.v0.feed.ClusterType;
import org.apache.falcon.regression.core.helpers.ColoHelper;
import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.core.util.InstanceUtil;
import org.apache.falcon.regression.core.util.OSUtil;
import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.log4j.Logger;
import org.testng.annotations.AfterClass;
@@ -85,11 +82,7 @@ public class FeedDelayParallelTimeoutTest extends BaseTestClass {
new org.apache.falcon.entity.v0.Frequency(
"hours(5)");
- feedOutput01 = InstanceUtil
- .setFeedCluster(feedOutput01,
- XmlUtil.createValidity("2010-10-01T12:00Z", "2099-01-01T00:00Z"),
- XmlUtil.createRetention("days(10000)", ActionType.DELETE), null,
- ClusterType.SOURCE, null);
+ feedOutput01 = FeedMerlin.fromString(feedOutput01).clearFeedClusters().toString();
// uncomment below 2 line when falcon in sync with ivory
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/4f2b524d/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedDeleteTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedDeleteTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedDeleteTest.java
index 1d3f88d..33fea84 100644
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedDeleteTest.java
+++ b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/prism/PrismFeedDeleteTest.java
@@ -19,6 +19,7 @@
package org.apache.falcon.regression.prism;
+import org.apache.falcon.regression.Entities.FeedMerlin;
import org.apache.falcon.regression.core.bundle.Bundle;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.feed.ActionType;
@@ -28,11 +29,9 @@ import org.apache.falcon.regression.core.interfaces.IEntityManagerHelper;
import org.apache.falcon.regression.core.response.ServiceResponse;
import org.apache.falcon.regression.core.util.AssertUtil;
import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.core.util.InstanceUtil;
import org.apache.falcon.regression.core.util.OSUtil;
import org.apache.falcon.regression.core.util.TimeUtil;
import org.apache.falcon.regression.core.util.Util;
-import org.apache.falcon.regression.core.util.XmlUtil;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.log4j.Logger;
import org.testng.Assert;
@@ -383,22 +382,26 @@ public class PrismFeedDeleteTest extends BaseTestClass {
String startTimeServer2 = "2012-10-01T12:00Z";
String feed = bundles[0].getDataSets().get(0);
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
- XmlUtil.createRetention("days(10000)", ActionType.DELETE), null,
- ClusterType.SOURCE, null);
-
- feed = InstanceUtil
- .setFeedCluster(feed, XmlUtil.createValidity(startTimeServer1, "2099-10-01T12:10Z"),
- XmlUtil.createRetention("days(10000)", ActionType.DELETE),
- Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE,
- "${cluster.colo}", baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN);
-
- feed = InstanceUtil
- .setFeedCluster(feed, XmlUtil.createValidity(startTimeServer2, "2099-10-01T12:25Z"),
- XmlUtil.createRetention("days(10000)", ActionType.DELETE),
- Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
- baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN);
+ feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+ .withRetention("days(10000)", ActionType.DELETE)
+ .withValidity(startTimeServer1, "2099-10-01T12:10Z")
+ .withClusterType(ClusterType.SOURCE)
+ .withPartition("${cluster.colo}")
+ .withDataLocation(baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+ .build())
+ .toString();
+
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+ .withRetention("days(10000)", ActionType.DELETE)
+ .withValidity(startTimeServer2, "2099-10-01T12:25Z")
+ .withClusterType(ClusterType.TARGET)
+ .withDataLocation(
+ baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+ .build()).toString();
Util.shutDownService(cluster1.getFeedHelper());
@@ -802,22 +805,26 @@ public class PrismFeedDeleteTest extends BaseTestClass {
String startTimeServer2 = "2012-10-01T12:00Z";
String feed = bundles[0].getDataSets().get(0);
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
- XmlUtil.createRetention("days(10000)", ActionType.DELETE), null,
- ClusterType.SOURCE, null);
-
- feed = InstanceUtil
- .setFeedCluster(feed, XmlUtil.createValidity(startTimeServer1, "2099-10-01T12:10Z"),
- XmlUtil.createRetention("days(10000)", ActionType.DELETE),
- Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE,
- "${cluster.colo}", baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN);
-
- feed = InstanceUtil
- .setFeedCluster(feed, XmlUtil.createValidity(startTimeServer2, "2099-10-01T12:25Z"),
- XmlUtil.createRetention("days(10000)", ActionType.DELETE),
- Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
- baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN);
+ feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+ .withRetention("days(10000)", ActionType.DELETE)
+ .withValidity(startTimeServer1, "2099-10-01T12:10Z")
+ .withClusterType(ClusterType.SOURCE)
+ .withPartition("${cluster.colo}")
+ .withDataLocation(baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+ .build())
+ .toString();
+
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+ .withRetention("days(10000)", ActionType.DELETE)
+ .withValidity(startTimeServer2, "2099-10-01T12:25Z")
+ .withClusterType(ClusterType.TARGET)
+ .withDataLocation(
+ baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+ .build()).toString();
LOGGER.info("feed: " + Util.prettyPrintXml(feed));
@@ -903,20 +910,24 @@ public class PrismFeedDeleteTest extends BaseTestClass {
String startTimeServer2 = "2012-10-01T12:00Z";
String feed = bundles[0].getDataSets().get(0);
- feed = InstanceUtil.setFeedCluster(feed,
- XmlUtil.createValidity("2012-10-01T12:00Z", "2010-01-01T00:00Z"),
- XmlUtil.createRetention("days(10000)", ActionType.DELETE), null,
- ClusterType.SOURCE, null);
- feed = InstanceUtil
- .setFeedCluster(feed, XmlUtil.createValidity(startTimeServer1, "2099-10-01T12:10Z"),
- XmlUtil.createRetention("days(10000)", ActionType.DELETE),
- Util.readEntityName(bundles[0].getClusters().get(0)), ClusterType.SOURCE,
- "${cluster.colo}", baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN);
- feed = InstanceUtil
- .setFeedCluster(feed, XmlUtil.createValidity(startTimeServer2, "2099-10-01T12:25Z"),
- XmlUtil.createRetention("days(10000)", ActionType.DELETE),
- Util.readEntityName(bundles[1].getClusters().get(0)), ClusterType.TARGET, null,
- baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN);
+ feed = FeedMerlin.fromString(feed).clearFeedClusters().toString();
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
+ .withRetention("days(10000)", ActionType.DELETE)
+ .withValidity(startTimeServer1, "2099-10-01T12:10Z")
+ .withClusterType(ClusterType.SOURCE)
+ .withPartition("${cluster.colo}")
+ .withDataLocation(baseHDFSDir + "/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+ .build())
+ .toString();
+ feed = FeedMerlin.fromString(feed).addFeedCluster(
+ new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
+ .withRetention("days(10000)", ActionType.DELETE)
+ .withValidity(startTimeServer2, "2099-10-01T12:25Z")
+ .withClusterType(ClusterType.TARGET)
+ .withDataLocation(
+ baseHDFSDir + "/clusterPath/localDC/rc/billing" + MINUTE_DATE_PATTERN)
+ .build()).toString();
LOGGER.info("feed: " + Util.prettyPrintXml(feed));
@@ -946,8 +957,8 @@ public class PrismFeedDeleteTest extends BaseTestClass {
Util.shutDownService(cluster1.getFeedHelper());
ServiceResponse response = prism.getFeedHelper().delete(feed);
- Assert.assertTrue(response.getMessage().contains(cluster1Colo + "/org.apache.falcon"
- + ".FalconException")
+ Assert.assertTrue(
+ response.getMessage().contains(cluster1Colo + "/org.apache.falcon.FalconException")
&& response.getMessage().contains(cluster2Colo + "/" + Util.readEntityName(feed)));
AssertUtil.assertPartial(response);