Posted to commits@falcon.apache.org by pr...@apache.org on 2016/08/26 12:24:54 UTC

[2/2] falcon git commit: FALCON-2127 Falcon regression compilation issue and deleting deprecated recipe test cases

FALCON-2127 Falcon regression compilation issue and deleting deprecated recipe test cases

This fix deletes the recipe-based test cases, since recipes are deprecated and are being moved to server-side extensions:
https://issues.apache.org/jira/browse/FALCON-634
It also fixes the current falcon-regression compilation issues.

The following files are deleted:
RecipeMerlin.java
MirrorWizardPage.java
HdfsRecipeTest.java
HiveDRTest.java
HiveDbDRTest.java
HiveObjectCreator.java
RecipeExecLocation.java
MirrorSourceTargetOptionsTest.java
MirrorSummaryTest.java
MirrorTest.java

Author: Murali Ramasami <mr...@hortonworks.com>

Reviewers: "Pragya Mittal <mi...@gmail.com>"

Closes #275 from muraliramasami/master and squashes the following commits:

346866b [Murali Ramasami] FALCON-2127: Fix the falcon regression compilation issue and delete the deprecated recipe test cases
d6719d3 [Murali Ramasami] FALCON-2127: Fix the falcon regression compilation issue and delete the deprecated recipe test cases


Project: http://git-wip-us.apache.org/repos/asf/falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/falcon/commit/c00975e4
Tree: http://git-wip-us.apache.org/repos/asf/falcon/tree/c00975e4
Diff: http://git-wip-us.apache.org/repos/asf/falcon/diff/c00975e4

Branch: refs/heads/master
Commit: c00975e419a4bf0f191d51de1294b3c2e044a83a
Parents: 35b10b4
Author: Murali Ramasami <mr...@hortonworks.com>
Authored: Fri Aug 26 17:54:19 2016 +0530
Committer: Pragya Mittal <mi...@gmail.com>
Committed: Fri Aug 26 17:54:19 2016 +0530

----------------------------------------------------------------------
 falcon-regression/merlin-core/pom.xml           |   5 +
 .../regression/Entities/RecipeMerlin.java       | 366 ---------
 .../regression/ui/search/MirrorWizardPage.java  | 517 -------------
 .../falcon/regression/ui/search/PageHeader.java |  11 -
 .../regression/hive/dr/HdfsRecipeTest.java      | 131 ----
 .../falcon/regression/hive/dr/HiveDRTest.java   | 736 -------------------
 .../falcon/regression/hive/dr/HiveDbDRTest.java | 279 -------
 .../regression/hive/dr/HiveObjectCreator.java   | 208 ------
 .../regression/hive/dr/RecipeExecLocation.java  |  63 --
 .../searchUI/MirrorSourceTargetOptionsTest.java | 206 ------
 .../regression/searchUI/MirrorSummaryTest.java  | 207 ------
 .../falcon/regression/searchUI/MirrorTest.java  | 414 -----------
 falcon-regression/pom.xml                       |   6 +
 13 files changed, 11 insertions(+), 3138 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin-core/pom.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/pom.xml b/falcon-regression/merlin-core/pom.xml
index d0c651d..4695bf1 100644
--- a/falcon-regression/merlin-core/pom.xml
+++ b/falcon-regression/merlin-core/pom.xml
@@ -227,6 +227,11 @@
         </dependency>
 
         <dependency>
+            <groupId>org.apache.falcon</groupId>
+            <artifactId>falcon-cli</artifactId>
+        </dependency>
+
+        <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-exec</artifactId>
         </dependency>
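
The falcon-cli dependency is added here without a version, so the version is presumably pinned through dependencyManagement in the parent pom (consistent with the six lines added to falcon-regression/pom.xml in the diffstat above). A minimal sketch of what that parent entry would look like; the version value is an assumption, not taken from this diff:

    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.apache.falcon</groupId>
                <artifactId>falcon-cli</artifactId>
                <!-- assumed: the real version is set in the parent falcon-regression/pom.xml -->
                <version>${project.version}</version>
            </dependency>
        </dependencies>
    </dependencyManagement>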

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/RecipeMerlin.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/RecipeMerlin.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/RecipeMerlin.java
deleted file mode 100644
index 9b9cff2..0000000
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/RecipeMerlin.java
+++ /dev/null
@@ -1,366 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.Entities;
-
-import org.apache.commons.configuration.AbstractFileConfiguration;
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.filefilter.FalseFileFilter;
-import org.apache.commons.io.filefilter.RegexFileFilter;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.falcon.cli.FalconCLI;
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.entity.v0.cluster.Interfacetype;
-import org.apache.falcon.regression.core.enumsAndConstants.MerlinConstants;
-import org.apache.falcon.entity.v0.process.ACL;
-import org.apache.falcon.entity.v0.process.PolicyType;
-import org.apache.falcon.entity.v0.process.Retry;
-import org.apache.falcon.regression.core.util.Config;
-import org.apache.falcon.regression.core.util.OSUtil;
-import org.apache.log4j.Logger;
-import org.testng.Assert;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.UUID;
-
-/** Class for representing a falcon recipe. */
-public final class RecipeMerlin {
-    private static final Logger LOGGER = Logger.getLogger(RecipeMerlin.class);
-    private static final String WORKFLOW_PATH_KEY = "falcon.recipe.workflow.path";
-    private static final String RECIPE_NAME_KEY = "falcon.recipe.name";
-    private static final String WRITE_DIR =
-        Config.getProperty("recipe.location", "/tmp/falcon-recipe");
-
-    private String template;
-    private AbstractFileConfiguration properties;
-    private String workflow;
-    private ClusterMerlin recipeCluster;
-    private ClusterMerlin srcCluster;
-    private ClusterMerlin tgtCluster;
-
-
-    public ClusterMerlin getRecipeCluster() {
-        return recipeCluster;
-    }
-
-    public ClusterMerlin getSrcCluster() {
-        return srcCluster;
-    }
-
-    public ClusterMerlin getTgtCluster() {
-        return tgtCluster;
-    }
-
-    public FalconCLI.RecipeOperation getRecipeOperation() {
-        return recipeOperation;
-    }
-
-    private FalconCLI.RecipeOperation recipeOperation;
-
-    private RecipeMerlin() {
-    }
-
-    public String getName() {
-        return properties.getString(RECIPE_NAME_KEY);
-    }
-
-    public void setUniqueName(String prefix) {
-        properties.setProperty(RECIPE_NAME_KEY, prefix + UUID.randomUUID().toString().split("-")[0]);
-    }
-
-    public String getSourceDir() {
-        return properties.getString("drSourceDir");
-    }
-
-    public RecipeMerlin withSourceDir(final String srcDir) {
-        properties.setProperty("drSourceDir", srcDir);
-        return this;
-    }
-
-    public String getTargetDir() {
-        return properties.getString("drTargetDir");
-    }
-
-    public RecipeMerlin withTargetDir(final String tgtDir) {
-        properties.setProperty("drTargetDir", tgtDir);
-        return this;
-    }
-
-    public String getSourceDb() {
-        return StringUtils.join(properties.getStringArray("sourceDatabase"), ',');
-    }
-
-    public RecipeMerlin withSourceDb(final String srcDatabase) {
-        properties.setProperty("sourceDatabase", srcDatabase);
-        return this;
-    }
-
-    public String getSourceTable() {
-        return StringUtils.join(properties.getStringArray("sourceTable"), ',');
-    }
-
-    public RecipeMerlin withSourceTable(final String tgtTable) {
-        properties.setProperty("sourceTable", tgtTable);
-        return this;
-    }
-
-    public RecipeMerlin withSourceCluster(ClusterMerlin sourceCluster) {
-        this.srcCluster = sourceCluster;
-        if (recipeOperation == FalconCLI.RecipeOperation.HDFS_REPLICATION) {
-            properties.setProperty("drSourceClusterFS", sourceCluster.getInterfaceEndpoint(Interfacetype.WRITE));
-        } else {
-            properties.setProperty("sourceCluster", sourceCluster.getName());
-            properties.setProperty("sourceMetastoreUri", sourceCluster.getProperty("hive.metastore.uris"));
-            properties.setProperty("sourceHiveServer2Uri", sourceCluster.getProperty("hive.server2.uri"));
-            //properties.setProperty("sourceServicePrincipal",
-            //    sourceCluster.getProperty("hive.metastore.kerberos.principal"));
-            properties.setProperty("sourceStagingPath", sourceCluster.getLocation("staging"));
-            properties.setProperty("sourceNN", sourceCluster.getInterfaceEndpoint(Interfacetype.WRITE));
-            properties.setProperty("sourceRM", sourceCluster.getInterfaceEndpoint(Interfacetype.EXECUTE));
-        }
-        return this;
-    }
-
-    public RecipeMerlin withTargetCluster(ClusterMerlin targetCluster) {
-        this.tgtCluster = targetCluster;
-        if (recipeOperation == FalconCLI.RecipeOperation.HDFS_REPLICATION) {
-            properties.setProperty("drTargetClusterFS", targetCluster.getInterfaceEndpoint(Interfacetype.WRITE));
-        } else {
-            properties.setProperty("targetCluster", targetCluster.getName());
-            properties.setProperty("targetMetastoreUri", targetCluster.getProperty("hive.metastore.uris"));
-            properties.setProperty("targetHiveServer2Uri", targetCluster.getProperty("hive.server2.uri"));
-            //properties.setProperty("targetServicePrincipal",
-            //    targetCluster.getProperty("hive.metastore.kerberos.principal"));
-            properties.setProperty("targetStagingPath", targetCluster.getLocation("staging"));
-            properties.setProperty("targetNN", targetCluster.getInterfaceEndpoint(Interfacetype.WRITE));
-            properties.setProperty("targetRM", targetCluster.getInterfaceEndpoint(Interfacetype.EXECUTE));
-        }
-        return this;
-    }
-
-    public RecipeMerlin withRecipeCluster(ClusterMerlin paramRecipeCluster) {
-        this.recipeCluster = paramRecipeCluster;
-        properties.setProperty("falcon.recipe.cluster.name", paramRecipeCluster.getName());
-        properties.setProperty("falcon.recipe.cluster.hdfs.writeEndPoint",
-            paramRecipeCluster.getInterfaceEndpoint(Interfacetype.WRITE));
-        return this;
-    }
-
-    public RecipeMerlin withValidity(final String start, final String end) {
-        properties.setProperty("falcon.recipe.cluster.validity.start", start);
-        properties.setProperty("falcon.recipe.cluster.validity.end", end);
-        return this;
-    }
-
-    public String getValidityStart() {
-        return properties.getString("falcon.recipe.cluster.validity.start");
-    }
-
-    public String getValidityEnd() {
-        return properties.getString("falcon.recipe.cluster.validity.end");
-    }
-
-    public RecipeMerlin withFrequency(final Frequency frequency) {
-        properties.setProperty("falcon.recipe.process.frequency", frequency.toString());
-        return this;
-    }
-
-    public Frequency getFrequency() {
-        return Frequency.fromString(properties.getString("falcon.recipe.process.frequency"));
-    }
-
-    public String getMaxEvents() {
-        return properties.getString("maxEvents");
-    }
-
-    public String getReplicationMaxMaps() {
-        return properties.getString("replicationMaxMaps");
-    }
-
-    public String getDistCpMaxMaps() {
-        return properties.getString("distcpMaxMaps");
-    }
-
-    public String getMapBandwidth() {
-        return properties.getString("distcpMapBandwidth");
-    }
-
-    public Retry getRetry() {
-        final int retryAttempts = properties.getInt("falcon.recipe.retry.attempts");
-        final String retryDelay = properties.getString("falcon.recipe.retry.delay");
-        final String retryPolicy = properties.getString("falcon.recipe.retry.policy");
-
-        Retry retry = new Retry();
-        retry.setAttempts(retryAttempts);
-        retry.setDelay(Frequency.fromString(retryDelay));
-        retry.setPolicy(PolicyType.fromValue(retryPolicy));
-        return retry;
-    }
-
-    public ACL getAcl() {
-        ACL acl = new ACL();
-        acl.setOwner(properties.getString("falcon.recipe.acl.owner"));
-        acl.setGroup(properties.getString("falcon.recipe.acl.group"));
-        acl.setPermission(properties.getString("falcon.recipe.acl.permission"));
-        return acl;
-    }
-
-
-    /**
-     * Read recipe from a given directory. Expecting that recipe will follow these conventions.
-     * <br> 1. properties file will have .properties extension
-     * <br> 2. template file will have end with -template.xml
-     * <br> 3. workflow file will have end with -workflow.xml
-     * @param readPath the location from where recipe will be read
-     * @param recipeOperation operation of this recipe
-     */
-    public static RecipeMerlin readFromDir(final String readPath,
-                                           FalconCLI.RecipeOperation recipeOperation) {
-        Assert.assertTrue(StringUtils.isNotEmpty(readPath), "readPath for recipe can't be empty");
-        Assert.assertNotNull(recipeOperation, "readPath for recipe can't be empty");
-        RecipeMerlin instance = new RecipeMerlin();
-        instance.recipeOperation = recipeOperation;
-        LOGGER.info("Loading recipe from directory: " + readPath);
-        File directory = null;
-        try {
-            directory = new File(RecipeMerlin.class.getResource("/" + readPath).toURI());
-        } catch (URISyntaxException e) {
-            Assert.fail("could not find dir: " + readPath);
-        }
-        final Collection<File> propertiesFiles = FileUtils.listFiles(directory,
-            new RegexFileFilter(".*\\.properties"), FalseFileFilter.INSTANCE);
-        Assert.assertEquals(propertiesFiles.size(), 1,
-            "Expecting only one property file at: " + readPath +" found: " + propertiesFiles);
-        try {
-            instance.properties =
-                new PropertiesConfiguration(propertiesFiles.iterator().next());
-        } catch (ConfigurationException e) {
-            Assert.fail("Couldn't read recipe's properties file because of exception: "
-                + ExceptionUtils.getStackTrace(e));
-        }
-        instance.properties.setFileName(null); //prevent accidental overwrite of template
-        //removing defaults - specific test need to supplied this
-        instance.properties.clearProperty("sourceDatabase");
-        instance.properties.clearProperty("sourceTable");
-        instance.properties.clearProperty("targetDatabase");
-        instance.properties.clearProperty("targetTable");
-        instance.properties.setProperty("falcon.recipe.acl.owner", MerlinConstants.CURRENT_USER_NAME);
-        instance.properties.setProperty("falcon.recipe.acl.group", MerlinConstants.CURRENT_USER_GROUP);
-        instance.properties.setProperty("falcon.recipe.acl.permission", "*");
-
-        final Collection<File> templatesFiles = FileUtils.listFiles(directory,
-            new RegexFileFilter(".*-template\\.xml"), FalseFileFilter.INSTANCE);
-        Assert.assertEquals(templatesFiles.size(), 1,
-            "Expecting only one template file at: " + readPath + " found: " + templatesFiles);
-        try {
-            instance.template =
-                FileUtils.readFileToString(templatesFiles.iterator().next());
-        } catch (IOException e) {
-            Assert.fail("Couldn't read recipe's template file because of exception: "
-                + ExceptionUtils.getStackTrace(e));
-        }
-
-        final Collection<File> workflowFiles = FileUtils.listFiles(directory,
-            new RegexFileFilter(".*-workflow\\.xml"), FalseFileFilter.INSTANCE);
-        Assert.assertEquals(workflowFiles.size(), 1,
-            "Expecting only one workflow file at: " + readPath + " found: " + workflowFiles);
-        try {
-            instance.workflow = FileUtils.readFileToString(workflowFiles.iterator().next());
-        } catch (IOException e) {
-            Assert.fail("Couldn't read recipe's workflow file because of exception: "
-                + ExceptionUtils.getStackTrace(e));
-        }
-        return instance;
-    }
-
-    /**
-     * Write recipe.
-     */
-    private void write() {
-        final String templateFileLocation = OSUtil.concat(WRITE_DIR, getName() + "-template.xml");
-        try {
-            Assert.assertNotNull(templateFileLocation,
-                "Write location for template file is unexpectedly null.");
-            FileUtils.writeStringToFile(new File(templateFileLocation), template);
-        } catch (IOException e) {
-            Assert.fail("Couldn't write recipe's template file because of exception: "
-                + ExceptionUtils.getStackTrace(e));
-        }
-
-        final String workflowFileLocation = OSUtil.concat(WRITE_DIR, getName() + "-workflow.xml");
-        try {
-            Assert.assertNotNull(workflowFileLocation,
-                "Write location for workflow file is unexpectedly null.");
-            FileUtils.writeStringToFile(new File(workflowFileLocation), workflow);
-        } catch (IOException e) {
-            Assert.fail("Couldn't write recipe's workflow file because of exception: "
-                + ExceptionUtils.getStackTrace(e));
-        }
-        properties.setProperty(WORKFLOW_PATH_KEY, workflowFileLocation);
-        properties.setProperty("falcon.recipe.workflow.name", getName() + "-workflow");
-
-        final String propFileLocation = OSUtil.concat(WRITE_DIR, getName() + ".properties");
-        try {
-            Assert.assertNotNull(propFileLocation,
-                "Write location for properties file is unexpectedly null.");
-            properties.save(new File(propFileLocation));
-        } catch (ConfigurationException e) {
-            Assert.fail("Couldn't write recipe's process file because of exception: "
-                + ExceptionUtils.getStackTrace(e));
-        }
-    }
-
-    /**
-     * Get submission command.
-     */
-    public List<String> getSubmissionCommand() {
-        write();
-        final List<String> cmd = new ArrayList<>();
-        Collections.addAll(cmd, "recipe", "-name", getName(),
-            "-operation", recipeOperation.toString());
-        return cmd;
-    }
-
-    /**
-     * Set tags for recipe.
-     */
-    public List<String> getTags() {
-        final String tagsStr = properties.getString("falcon.recipe.tags");
-        if (StringUtils.isEmpty(tagsStr)) {
-            return new ArrayList<>();
-        }
-        return Arrays.asList(tagsStr.split(","));
-    }
-
-    /**
-     * Set tags for recipe.
-     */
-    public void setTags(List<String> tags) {
-        properties.setProperty("falcon.recipe.tags", StringUtils.join(tags, ','));
-    }
-}
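
For reference, the deleted RecipeMerlin exposed a fluent builder that the regression tests chained before handing the result to the Falcon CLI. A minimal sketch of that now-removed usage, with placeholder clusters, paths, and method name; it mirrors the setup in the deleted HdfsRecipeTest further down:

    import java.util.List;
    import org.apache.falcon.cli.FalconCLI;
    import org.apache.falcon.entity.v0.Frequency;
    import org.apache.falcon.regression.Entities.ClusterMerlin;
    import org.apache.falcon.regression.Entities.RecipeMerlin;

    /** Sketch: builds an HDFS replication recipe the way the deleted tests did. */
    static List<String> buildSubmission(ClusterMerlin recipeCluster, ClusterMerlin src,
                                        ClusterMerlin tgt, String start, String end) {
        RecipeMerlin recipe = RecipeMerlin
            .readFromDir("HdfsRecipe", FalconCLI.RecipeOperation.HDFS_REPLICATION)
            .withRecipeCluster(recipeCluster)       // cluster the recipe process is submitted to
            .withSourceCluster(src)
            .withTargetCluster(tgt)
            .withFrequency(new Frequency("5", Frequency.TimeUnit.minutes))
            .withValidity(start, end)
            .withSourceDir("/tmp/falcon-dr/source") // placeholder paths
            .withTargetDir("/tmp/falcon-dr/target");
        recipe.setUniqueName("HdfsRecipeTest");
        // write() emits <name>-template.xml, <name>-workflow.xml and <name>.properties,
        // and the returned command is: recipe -name <name> -operation HDFS_REPLICATION
        return recipe.getSubmissionCommand();
    }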

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/MirrorWizardPage.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/MirrorWizardPage.java b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/MirrorWizardPage.java
deleted file mode 100644
index f990c92..0000000
--- a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/MirrorWizardPage.java
+++ /dev/null
@@ -1,517 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.ui.search;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.falcon.cli.FalconCLI;
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.entity.v0.cluster.Interfacetype;
-import org.apache.falcon.entity.v0.process.ACL;
-import org.apache.falcon.entity.v0.process.Retry;
-import org.apache.falcon.regression.Entities.ClusterMerlin;
-import org.apache.falcon.regression.Entities.RecipeMerlin;
-import org.apache.falcon.regression.core.util.TimeUtil;
-import org.apache.falcon.regression.core.util.UIAssert;
-import org.apache.log4j.Logger;
-import org.joda.time.DateTime;
-import org.joda.time.format.DateTimeFormat;
-import org.openqa.selenium.By;
-import org.openqa.selenium.WebDriver;
-import org.openqa.selenium.WebElement;
-import org.openqa.selenium.support.FindBy;
-import org.openqa.selenium.support.FindBys;
-
-import java.util.EnumMap;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/** Page object of the Mirror creation page. */
-public class MirrorWizardPage extends AbstractSearchPage {
-    private static final Logger LOGGER = Logger.getLogger(MirrorWizardPage.class);
-    @FindBys({
-        @FindBy(className = "mainUIView"),
-        @FindBy(className = "formPage")
-    })
-    private WebElement mirrorBox;
-
-    public MirrorWizardPage(WebDriver driver) {
-        super(driver);
-    }
-
-    @Override
-    public void checkPage() {
-        UIAssert.assertDisplayed(mirrorBox, "Mirror box");
-    }
-
-
-    public void setName(String name) {
-        clearAndSetByNgModel("UIModel.name", name);
-    }
-
-    public void setTags(List<String> tags) {
-        //TODO add code here
-    }
-
-    public void setMirrorType(FalconCLI.RecipeOperation recipeOperation) {
-        switch (recipeOperation) {
-        case HDFS_REPLICATION:
-            driver.findElement(By.xpath("//button[contains(.,'File System')]")).click();
-            break;
-        case HIVE_DISASTER_RECOVERY:
-            driver.findElement(By.xpath("//button[contains(.,'HIVE')]")).click();
-            break;
-        default:
-            break;
-        }
-    }
-
-
-    public void setHiveReplication(RecipeMerlin recipeMerlin) {
-        if (StringUtils.isNotEmpty(recipeMerlin.getSourceTable())) {
-            clickById("targetHIVETablesRadio");
-            clearAndSetByNgModel("UIModel.source.hiveDatabase", recipeMerlin.getSourceDb());
-            clearAndSetByNgModel("UIModel.source.hiveTables", recipeMerlin.getSourceTable());
-        } else {
-            clickById("targetHIVEDatabaseRadio");
-            clearAndSetByNgModel("UIModel.source.hiveDatabases", recipeMerlin.getSourceDb());
-        }
-    }
-
-
-    public void setStartTime(String validityStartStr) {
-        final DateTime startDate = TimeUtil.oozieDateToDate(validityStartStr);
-
-        clearAndSetByNgModel("UIModel.validity.start", DateTimeFormat.forPattern("MM/dd/yyyy").print(startDate));
-        final WebElement startTimeBox = driver.findElement(By.className("startTimeBox"));
-        final List<WebElement> startHourAndMinute = startTimeBox.findElements(By.tagName("input"));
-        final WebElement hourText = startHourAndMinute.get(0);
-        final WebElement minuteText = startHourAndMinute.get(1);
-        clearAndSet(hourText, DateTimeFormat.forPattern("hh").print(startDate));
-        clearAndSet(minuteText, DateTimeFormat.forPattern("mm").print(startDate));
-        final WebElement amPmButton = startTimeBox.findElement(By.tagName("button"));
-        if (!amPmButton.getText().equals(DateTimeFormat.forPattern("a").print(startDate))) {
-            amPmButton.click();
-        }
-    }
-
-    public void setEndTime(String validityEndStr) {
-        final DateTime validityEnd = TimeUtil.oozieDateToDate(validityEndStr);
-
-        clearAndSetByNgModel("UIModel.validity.end", DateTimeFormat.forPattern("MM/dd/yyyy").print(validityEnd));
-        final WebElement startTimeBox = driver.findElement(By.className("endTimeBox"));
-        final List<WebElement> startHourAndMinute = startTimeBox.findElements(By.tagName("input"));
-        final WebElement hourText = startHourAndMinute.get(0);
-        final WebElement minuteText = startHourAndMinute.get(1);
-        clearAndSet(hourText, DateTimeFormat.forPattern("hh").print(validityEnd));
-        clearAndSet(minuteText, DateTimeFormat.forPattern("mm").print(validityEnd));
-        final WebElement amPmButton = startTimeBox.findElement(By.tagName("button"));
-        if (!amPmButton.getText().equals(DateTimeFormat.forPattern("a").print(validityEnd))) {
-            amPmButton.click();
-        }
-    }
-
-    public void toggleAdvancedOptions() {
-        final WebElement advanceOption = driver.findElement(By.xpath("//h4[contains(.,'Advanced options')]"));
-        advanceOption.click();
-    }
-
-    public void setFrequency(Frequency frequency) {
-        clearAndSetByNgModel("UIModel.frequency.number", frequency.getFrequency());
-        selectNgModelByVisibleText("UIModel.frequency.unit", frequency.getTimeUnit().name().toLowerCase());
-    }
-
-    public void setHdfsDistCpMaxMaps(String distCpMaxMaps) {
-        clearAndSetByNgModel("UIModel.allocation.hdfs.maxMaps", distCpMaxMaps);
-    }
-
-
-    public void setHdfsMaxBandwidth(String replicationMaxMaps) {
-        clearAndSetByNgModel("UIModel.allocation.hdfs.maxBandwidth", replicationMaxMaps);
-    }
-
-    public void setHiveDistCpMaxMaps(String distCpMaxMaps) {
-        clearAndSetByNgModel("UIModel.allocation.hive.maxMapsDistcp", distCpMaxMaps);
-    }
-
-
-    public void setHiveReplicationMaxMaps(String replicationMaxMaps) {
-        clearAndSetByNgModel("UIModel.allocation.hive.maxMapsMirror", replicationMaxMaps);
-    }
-
-    public void setMaxEvents(String maxEvents) {
-        clearAndSetByNgModel("UIModel.allocation.hive.maxMapsEvents", maxEvents);
-    }
-
-    public void setHiveMaxBandwidth(String maxBandWidth) {
-        clearAndSetByNgModel("UIModel.allocation.hive.maxBandwidth", maxBandWidth);
-    }
-
-
-    public void setSourceInfo(ClusterMerlin srcCluster) {
-        setSourceStaging(srcCluster.getLocation("staging"));
-        setSourceHiveEndpoint(srcCluster.getInterfaceEndpoint(Interfacetype.REGISTRY));
-    }
-
-    public void setSourceHiveEndpoint(String hiveEndpoint) {
-        clearAndSetByNgModel("UIModel.hiveOptions.source.hiveServerToEndpoint", hiveEndpoint);
-    }
-
-    public void setSourceStaging(String stagingLocation) {
-        clearAndSetByNgModel("UIModel.hiveOptions.source.stagingPath", stagingLocation);
-    }
-
-    public void setTargetInfo(ClusterMerlin tgtCluster) {
-        setTargetStaging(tgtCluster.getLocation("staging"));
-        setTargetHiveEndpoint(tgtCluster.getInterfaceEndpoint(Interfacetype.REGISTRY));
-    }
-
-    public void setTargetHiveEndpoint(String hiveEndPoint) {
-        clearAndSetByNgModel("UIModel.hiveOptions.target.hiveServerToEndpoint", hiveEndPoint);
-    }
-
-    public void setTargetStaging(String hiveEndpoint) {
-        clearAndSetByNgModel("UIModel.hiveOptions.target.stagingPath", hiveEndpoint);
-    }
-
-    public void setRetry(Retry retry) {
-        selectNgModelByVisibleText("UIModel.retry.policy", retry.getPolicy().toString().toUpperCase());
-        clearAndSetByNgModel("UIModel.retry.delay.number", retry.getDelay().getFrequency());
-        selectNgModelByVisibleText("UIModel.retry.delay.unit", retry.getDelay().getTimeUnit().name().toLowerCase());
-        clearAndSetByNgModel("UIModel.retry.attempts", String.valueOf(retry.getAttempts()));
-    }
-
-
-    public void setAcl(ACL acl) {
-        setAclOwner(acl.getOwner());
-        setAclGroup(acl.getGroup());
-        setAclPermission(acl.getPermission());
-    }
-
-    public void setAclOwner(String aclOwner) {
-        clearAndSetSlowlyByNgModel("UIModel.acl.owner", aclOwner);
-    }
-
-    public boolean isAclOwnerWarningDisplayed() {
-        final WebElement warning =
-            findElementByNgModel("UIModel.acl.owner").findElement(By.xpath("./following-sibling::*"));
-        waitForAngularToFinish();
-        return warning.isDisplayed();
-    }
-
-    public void setAclGroup(String aclGroup) {
-        clearAndSetSlowlyByNgModel("UIModel.acl.group", aclGroup);
-    }
-
-    public boolean isAclGroupWarningDisplayed() {
-        final WebElement warning =
-            findElementByNgModel("UIModel.acl.group").findElement(By.xpath("./following-sibling::*"));
-        waitForAngularToFinish();
-        return warning.isDisplayed();
-    }
-
-    public void setAclPermission(String aclPermission) {
-        clearAndSetSlowlyByNgModel("UIModel.acl.permissions", aclPermission);
-    }
-
-    public boolean isAclPermissionWarningDisplayed() {
-        final WebElement warning =
-            findElementByNgModel("UIModel.acl.permissions").findElement(By.xpath("./following-sibling::*"));
-        waitForAngularToFinish();
-        return warning.isDisplayed();
-    }
-
-    public void next() {
-        final WebElement nextButton = driver.findElement(By.xpath("//button[contains(.,'Next')]"));
-        nextButton.click();
-    }
-
-    public void previous() {
-        final WebElement prevButton = driver.findElement(By.xpath("//button[contains(.,'Previous')]"));
-        prevButton.click();
-    }
-
-    public void silentPrevious() {
-        try {
-            previous();
-        } catch (Exception ignore) {
-            //ignore
-        }
-    }
-
-    public void cancel() {
-        driver.findElement(By.xpath("//a[contains(.,'Cancel')]"));
-    }
-
-    public void save() {
-        final WebElement saveButton = driver.findElement(By.xpath("//button[contains(.,'Save')]"));
-        UIAssert.assertDisplayed(saveButton, "Save button in not displayed.");
-        saveButton.click();
-        waitForAlert();
-    }
-
-    public ClusterBlock getSourceBlock() {
-        return new ClusterBlock("Source");
-    }
-
-    public ClusterBlock getTargetBlock() {
-        return new ClusterBlock("Target");
-    }
-
-    /**
-     * Populates hive dr UI with parameters from recipe.
-     * @param recipe recipe
-     * @param overwriteDefaults should it overwrite HiveDR default values automatically picked up by UI
-     */
-    public void applyRecipe(RecipeMerlin recipe, boolean overwriteDefaults) {
-        final ClusterMerlin srcCluster = recipe.getSrcCluster();
-        final ClusterMerlin tgtCluster = recipe.getTgtCluster();
-        setName(recipe.getName());
-        setTags(recipe.getTags());
-        setMirrorType(recipe.getRecipeOperation());
-        getSourceBlock().selectCluster(srcCluster.getName());
-        getTargetBlock().selectCluster(tgtCluster.getName());
-        getSourceBlock().selectRunHere();
-        setStartTime(recipe.getValidityStart());
-        setEndTime(recipe.getValidityEnd());
-        toggleAdvancedOptions();
-        switch (recipe.getRecipeOperation()) {
-        case HDFS_REPLICATION:
-            getSourceBlock().setPath(recipe.getSourceDir());
-            getTargetBlock().setPath(recipe.getTargetDir());
-            setHdfsDistCpMaxMaps(recipe.getDistCpMaxMaps());
-            setHdfsMaxBandwidth(recipe.getDistCpMaxMaps());
-            break;
-        case HIVE_DISASTER_RECOVERY:
-            setHiveReplication(recipe);
-            setHiveDistCpMaxMaps(recipe.getDistCpMaxMaps());
-            setHiveReplicationMaxMaps(recipe.getReplicationMaxMaps());
-            setMaxEvents(recipe.getMaxEvents());
-            setHiveMaxBandwidth(recipe.getMapBandwidth());
-            if (overwriteDefaults) {
-                setSourceInfo(recipe.getSrcCluster());
-                setTargetInfo(recipe.getTgtCluster());
-            }
-            break;
-        default:
-            break;
-        }
-        setFrequency(recipe.getFrequency());
-        setRetry(recipe.getRetry());
-        setAcl(recipe.getAcl());
-    }
-
-    public int getStepNumber() {
-        try {
-            driver.findElement(By.xpath("//button[contains(.,'Previous')]"));
-            return 2;
-        } catch (Exception ignore) {
-            //ignore
-        }
-        return 1;
-    }
-
-    public Map<Summary, String> getSummaryProperties() {
-        String formText = driver.findElement(By.id("formSummaryBox")).getText();
-        Map<Summary, String> props = new EnumMap<>(Summary.class);
-        props.put(Summary.NAME, getBetween(formText, "Name", "Type"));
-        props.put(Summary.TYPE, getBetween(formText, "Type", "Tags"));
-        props.put(Summary.TAGS, getBetween(formText, "Tags", "Source"));
-        props.put(Summary.RUN_ON, getBetween(formText, "Run On", "Schedule"));
-        props.put(Summary.START, getBetween(formText, "Start on:", "End on:"));
-        props.put(Summary.END, getBetween(formText, "End on:", "Max Maps"));
-        props.put(Summary.MAX_MAPS, getBetween(formText, "Max Maps", "Max Bandwidth"));
-        props.put(Summary.MAX_BANDWIDTH, getBetween(formText, "Max Bandwidth", "ACL"));
-
-        props.put(Summary.ACL_OWNER, getBetween(formText, "Owner:", "Group:"));
-        props.put(Summary.ACL_GROUP, getBetween(formText, "Group:", "Permissions:"));
-        props.put(Summary.ACL_PERMISSIONS, getBetween(formText, "Permissions:", "Retry"));
-
-        props.put(Summary.RETRY_POLICY, getBetween(formText, "Policy:", "delay:"));
-        props.put(Summary.RETRY_DELAY, getBetween(formText, "delay:", "Attempts:"));
-        props.put(Summary.RETRY_ATTEMPTS, getBetween(formText, "Attempts:", "Frequency"));
-
-        props.put(Summary.FREQUENCY, getBetween(formText, "Frequency", "Previous"));
-
-        String source = getBetween(formText, "Source", "Target");
-        String target = getBetween(formText, "Target", "Run On");
-        if ("HDFS".equals(props.get(Summary.TYPE))) {
-            props.put(Summary.SOURCE_LOCATION, getBetween(source, "Location", "Path"));
-            props.put(Summary.TARGET_LOCATION, getBetween(target, "Location", "Path"));
-            if ("HDFS".equals(props.get(Summary.SOURCE_LOCATION))) {
-                props.put(Summary.SOURCE_CLUSTER, getBetween(source, "^", "Location"));
-                props.put(Summary.SOURCE_PATH, getBetween(source, "Path:", "$"));
-
-            } else {
-                props.put(Summary.SOURCE_PATH, getBetween(source, "Path:", "URL"));
-                props.put(Summary.SOURCE_URL, getBetween(source, "URL:", "$"));
-
-            }
-            if ("HDFS".equals(props.get(Summary.TARGET_LOCATION))) {
-                props.put(Summary.TARGET_CLUSTER, getBetween(target, "^", "Location"));
-                props.put(Summary.TARGET_PATH, getBetween(target, "Path:", "$"));
-
-            } else {
-                props.put(Summary.TARGET_PATH, getBetween(target, "Path:", "URL"));
-                props.put(Summary.TARGET_URL, getBetween(target, "URL:", "$"));
-
-            }
-
-        } else {
-            LOGGER.error("TODO Read info for HIVE replication.");
-        }
-
-
-        return props;
-    }
-
-    /** Parts of the mirror summary. */
-    public enum Summary {
-        NAME,
-        TYPE,
-        TAGS,
-        RUN_ON,
-        START,
-        END,
-        MAX_MAPS,
-        MAX_BANDWIDTH,
-        ACL_OWNER,
-        ACL_GROUP,
-        ACL_PERMISSIONS,
-        RETRY_POLICY,
-        RETRY_DELAY,
-        RETRY_ATTEMPTS,
-        FREQUENCY,
-        SOURCE_LOCATION,
-        SOURCE_PATH,
-        SOURCE_CLUSTER,
-        SOURCE_URL,
-        TARGET_LOCATION,
-        TARGET_PATH,
-        TARGET_CLUSTER,
-        TARGET_URL,
-
-    }
-
-    private static String getBetween(String text, String first, String second) {
-        Pattern pattern = Pattern.compile(".*" + first + "(.+)" + second + ".*", Pattern.DOTALL);
-        Matcher matcher = pattern.matcher(text);
-        if (matcher.find()) {
-            return matcher.group(1).trim();
-        } else {
-            return null;
-        }
-    }
-
-    /**
-     * Block of source or target cluster with parameters.
-     */
-    public final class ClusterBlock {
-        private final WebElement mainBlock;
-        private final WebElement runHereButton;
-        private final String blockType;
-
-        private ClusterBlock(String type) {
-            this.blockType = type;
-            mainBlock = driver.findElement(By.xpath("//h3[contains(.,'" + type + "')]/.."));
-            runHereButton = mainBlock.findElement(By.id("runJobOn" + type + "Radio"));
-        }
-
-        public Set<Location> getAvailableLocationTypes() {
-            List<WebElement> inputs = getLocationBox().findElements(By.xpath(".//input"));
-            Set<Location> result = EnumSet.noneOf(Location.class);
-            for (WebElement input : inputs) {
-                result.add(Location.getByInput(input));
-            }
-            return result;
-        }
-
-        public Location getSelectedLocationType() {
-            WebElement selected = getLocationBox()
-                .findElement(By.xpath("//input[contains(@class,'ng-valid-parse')]"));
-            return Location.getByInput(selected);
-        }
-
-        public void setLocationType(Location type) {
-            getLocationBox().findElement(By.xpath(
-                String.format(".//input[translate(@value,'azures','AZURES')='%s']", type.toString()))).click();
-        }
-
-        public void selectRunHere() {
-            runHereButton.click();
-        }
-
-        public Set<String> getAvailableClusters() {
-            List<WebElement> options = mainBlock.findElements(By.xpath(".//option[not(@disabled)]"));
-            Set<String> clusters = new TreeSet<>();
-            for (WebElement option : options) {
-                clusters.add(option.getText());
-            }
-            return clusters;
-        }
-
-        public void selectCluster(String clusterName) {
-            selectNgModelByVisibleText("UIModel." + blockType.toLowerCase() + ".cluster", clusterName);
-        }
-
-        public void setPath(String path) {
-            final WebElement srcPathElement = getPath();
-            clearAndSet(srcPathElement, path);
-        }
-
-        public boolean isRunHereSelected() {
-            return runHereButton.getAttribute("class").contains("ng-valid-parse");
-        }
-
-        public boolean isRunHereAvailable() {
-            return runHereButton.getAttribute("disabled") == null;
-        }
-
-
-        private WebElement getLocationBox() {
-            return mainBlock.findElement(By.className("locationBox"));
-        }
-
-        private WebElement getPath() {
-            return mainBlock.findElement(By.name(blockType.toLowerCase() + "ClusterPathInput"));
-        }
-
-
-
-    }
-
-    /**
-     * Types of source/target location.
-     */
-    public enum Location {
-        HDFS,
-        AZURE,
-        S3;
-
-        private static Location getByInput(WebElement input) {
-            return Location.valueOf(input.getAttribute("value").trim().toUpperCase());
-        }
-
-    }
-
-}
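
The deleted searchUI tests (MirrorTest and friends, removed in the same commit) drove this page object through PageHeader.doCreateMirror(), which is also removed below. A hypothetical sketch of that flow, reconstructed from the methods of the deleted class:

    /** Sketch: creates a mirror through the wizard the way the deleted UI tests did. */
    static void createMirror(PageHeader header, RecipeMerlin recipe) {
        MirrorWizardPage mirrorPage = header.doCreateMirror(); // removed from PageHeader below
        mirrorPage.applyRecipe(recipe, true); // fill the form, overwriting HiveDR defaults
        mirrorPage.next();                    // advance from the form step to the summary step
        mirrorPage.save();                    // save and wait for the confirmation alert
    }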

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/PageHeader.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/PageHeader.java b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/PageHeader.java
index 61d9475..15d27bd 100644
--- a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/PageHeader.java
+++ b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/PageHeader.java
@@ -166,7 +166,6 @@ public class PageHeader {
             driver.get(oldUrl);
             doCreateProcess();
             driver.get(oldUrl);
-            doCreateMirror();
             driver.get(oldUrl);
         }
         //home button navigation
@@ -232,16 +231,6 @@ public class PageHeader {
         return processPage;
     }
 
-    public MirrorWizardPage doCreateMirror() {
-        UIAssert.assertDisplayed(mirrorCreateButton, "Mirror create button");
-        Assert.assertEquals(mirrorCreateButton.getText(), "Mirror",
-            "Unexpected text on create mirror button");
-        mirrorCreateButton.click();
-        final MirrorWizardPage mirrorPage = PageFactory.initElements(driver, MirrorWizardPage.class);
-        mirrorPage.checkPage();
-        return mirrorPage;
-    }
-
     private List<String> getHomeUrls() {
         List<String> urls = new ArrayList<>();
         String homeUrl = MerlinConstants.PRISM_URL;

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HdfsRecipeTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HdfsRecipeTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HdfsRecipeTest.java
deleted file mode 100644
index 07996d5..0000000
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HdfsRecipeTest.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.hive.dr;
-
-import org.apache.falcon.cli.FalconCLI;
-import org.apache.falcon.entity.v0.EntityType;
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.regression.Entities.ClusterMerlin;
-import org.apache.falcon.regression.Entities.RecipeMerlin;
-import org.apache.falcon.regression.core.bundle.Bundle;
-import org.apache.falcon.regression.core.helpers.ColoHelper;
-import org.apache.falcon.regression.core.supportClasses.ExecResult;
-import org.apache.falcon.regression.core.util.AssertUtil;
-import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.core.util.HadoopUtil;
-import org.apache.falcon.regression.core.util.InstanceUtil;
-import org.apache.falcon.regression.core.util.MatrixUtil;
-import org.apache.falcon.regression.core.util.OSUtil;
-import org.apache.falcon.regression.core.util.TimeUtil;
-import org.apache.falcon.regression.testHelper.BaseTestClass;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
-import org.apache.oozie.client.CoordinatorAction;
-import org.apache.oozie.client.OozieClient;
-import org.testng.Assert;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- * Hdfs recipe test.
- */
-@Test(groups = {"embedded", "multiCluster"})
-public class HdfsRecipeTest extends BaseTestClass {
-    private static final Logger LOGGER = Logger.getLogger(HdfsRecipeTest.class);
-    private final ColoHelper cluster = servers.get(0);
-    private final ColoHelper cluster2 = servers.get(1);
-    private final FileSystem clusterFS = serverFS.get(0);
-    private final FileSystem clusterFS2 = serverFS.get(1);
-    private final OozieClient clusterOC = serverOC.get(0);
-    private final OozieClient clusterOC2 = serverOC.get(1);
-    private final String baseTestHDFSDir = cleanAndGetTestDir() + "/HdfsDR";
-    private String sourceDataLocation = baseTestHDFSDir + "/source";
-    private String targetDataLocation = baseTestHDFSDir + "/target";
-    private RecipeMerlin hdfsRecipe;
-
-    @DataProvider
-    public Object[][] getRecipeLocation() {
-        return MatrixUtil.crossProduct(RecipeExecLocation.values());
-    }
-
-    private void setUp(RecipeExecLocation recipeExecLocation) throws Exception {
-        bundles[0] = new Bundle(BundleUtil.readELBundle(), cluster);
-        bundles[1] = new Bundle(BundleUtil.readELBundle(), cluster2);
-        bundles[0].generateUniqueBundle(this);
-        bundles[1].generateUniqueBundle(this);
-        final ClusterMerlin srcCluster = bundles[0].getClusterElement();
-        final ClusterMerlin tgtCluster = bundles[1].getClusterElement();
-        String recipeDir = "HdfsRecipe";
-        Bundle.submitCluster(recipeExecLocation.getRecipeBundle(bundles[0], bundles[1]));
-        hdfsRecipe = RecipeMerlin.readFromDir(recipeDir, FalconCLI.RecipeOperation.HDFS_REPLICATION)
-            .withRecipeCluster(recipeExecLocation.getRecipeCluster(srcCluster, tgtCluster));
-        hdfsRecipe.withSourceCluster(srcCluster)
-            .withTargetCluster(tgtCluster)
-            .withFrequency(new Frequency("5", Frequency.TimeUnit.minutes))
-            .withValidity(TimeUtil.getTimeWrtSystemTime(-5), TimeUtil.getTimeWrtSystemTime(15));
-        hdfsRecipe.setUniqueName(this.getClass().getSimpleName());
-    }
-
-    /**
-     * Test recipe based replication with 1 source and 1 target.
-     */
-    @Test(dataProvider = "getRecipeLocation")
-    public void test1Source1Target(RecipeExecLocation execLocation) throws Exception {
-        setUp(execLocation);
-        hdfsRecipe.withSourceDir(sourceDataLocation).withTargetDir(targetDataLocation);
-        final List<String> command = hdfsRecipe.getSubmissionCommand();
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(execLocation.getRecipeOC(clusterOC, clusterOC2),
-            hdfsRecipe.getName(), 1, CoordinatorAction.Status.WAITING, EntityType.PROCESS);
-
-        HadoopUtil.copyDataToFolder(clusterFS, sourceDataLocation, OSUtil.NORMAL_INPUT);
-
-        InstanceUtil.waitTillInstanceReachState(execLocation.getRecipeOC(clusterOC, clusterOC2),
-            hdfsRecipe.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        //check if data has been replicated correctly
-        List<Path> cluster1ReplicatedData = HadoopUtil
-            .getAllFilesRecursivelyHDFS(clusterFS, new Path(sourceDataLocation));
-        List<Path> cluster2ReplicatedData = HadoopUtil
-            .getAllFilesRecursivelyHDFS(clusterFS2, new Path(targetDataLocation));
-
-        AssertUtil.checkForListSizes(cluster1ReplicatedData, cluster2ReplicatedData);
-
-        //particular check for https://issues.apache.org/jira/browse/FALCON-1643
-        ExecResult execResult = cluster.getProcessHelper().getCLIMetrics(hdfsRecipe.getName());
-        AssertUtil.assertCLIMetrics(execResult, hdfsRecipe.getName(), 1, true);
-    }
-
-    @AfterMethod(alwaysRun = true)
-    public void tearDown() throws IOException {
-        try {
-            prism.getProcessHelper().deleteByName(hdfsRecipe.getName(), null);
-        } catch (Exception e) {
-            LOGGER.info("Deletion of process: " + hdfsRecipe.getName() + " failed with exception: " + e);
-        }
-        removeTestClassEntities();
-        cleanTestsDirs();
-    }
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveDRTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveDRTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveDRTest.java
deleted file mode 100644
index 7cd71e1..0000000
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveDRTest.java
+++ /dev/null
@@ -1,736 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.hive.dr;
-
-import org.apache.falcon.cli.FalconCLI;
-import org.apache.falcon.entity.v0.EntityType;
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.regression.Entities.ClusterMerlin;
-import org.apache.falcon.regression.Entities.RecipeMerlin;
-import org.apache.falcon.regression.core.bundle.Bundle;
-import org.apache.falcon.regression.core.enumsAndConstants.MerlinConstants;
-import org.apache.falcon.regression.core.helpers.ColoHelper;
-import org.apache.falcon.regression.core.supportClasses.ExecResult;
-import org.apache.falcon.regression.core.util.AssertUtil;
-import org.apache.falcon.regression.core.supportClasses.NotifyingAssert;
-import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.core.util.HiveAssert;
-import org.apache.falcon.regression.core.util.InstanceUtil;
-import org.apache.falcon.regression.core.util.MatrixUtil;
-import org.apache.falcon.regression.core.util.OozieUtil;
-import org.apache.falcon.regression.core.util.TimeUtil;
-import org.apache.falcon.regression.testHelper.BaseTestClass;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hive.hcatalog.api.HCatClient;
-import org.apache.log4j.Logger;
-import org.apache.oozie.client.BundleJob;
-import org.apache.oozie.client.CoordinatorAction;
-import org.apache.oozie.client.CoordinatorJob;
-import org.apache.oozie.client.OozieClient;
-import org.testng.Assert;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-import org.testng.asserts.SoftAssert;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.apache.falcon.regression.core.util.HiveUtil.runSql;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.bootstrapCopy;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.createExternalTable;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.createExternalPartitionedTable;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.createPartitionedTable;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.createSerDeTable;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.createVanillaTable;
-
-/**
- * Hive DR Testing.
- */
-@Test(groups = {"embedded", "multiCluster"})
-public class HiveDRTest extends BaseTestClass {
-    private static final Logger LOGGER = Logger.getLogger(HiveDRTest.class);
-    private static final String DB_NAME = "hdr_sdb1";
-    private final ColoHelper cluster = servers.get(0);
-    private final ColoHelper cluster2 = servers.get(1);
-    private final ColoHelper cluster3 = servers.get(2);
-    private final FileSystem clusterFS = serverFS.get(0);
-    private final FileSystem clusterFS2 = serverFS.get(1);
-    private final FileSystem clusterFS3 = serverFS.get(2);
-    private final OozieClient clusterOC = serverOC.get(0);
-    private final OozieClient clusterOC2 = serverOC.get(1);
-    private final OozieClient clusterOC3 = serverOC.get(2);
-    private final String baseTestHDFSDir = cleanAndGetTestDir() + "/HiveDR/";
-    private HCatClient clusterHC;
-    private HCatClient clusterHC2;
-    private RecipeMerlin recipeMerlin;
-    private Connection connection;
-    private Connection connection2;
-
-    @DataProvider
-    public Object[][] getRecipeLocation() {
-        return MatrixUtil.crossProduct(RecipeExecLocation.values());
-    }
-
-    private void setUp(RecipeExecLocation recipeExecLocation) throws Exception {
-        clusterHC = cluster.getClusterHelper().getHCatClient();
-        clusterHC2 = cluster2.getClusterHelper().getHCatClient();
-        bundles[0] = new Bundle(BundleUtil.readHCatBundle(), cluster);
-        bundles[1] = new Bundle(BundleUtil.readHCatBundle(), cluster2);
-        bundles[0].generateUniqueBundle(this);
-        bundles[1].generateUniqueBundle(this);
-        final ClusterMerlin srcCluster = bundles[0].getClusterElement();
-        final ClusterMerlin tgtCluster = bundles[1].getClusterElement();
-        String recipeDir = "HiveDrRecipe";
-        if (MerlinConstants.IS_SECURE) {
-            recipeDir = "HiveDrSecureRecipe";
-        }
-        Bundle.submitCluster(recipeExecLocation.getRecipeBundle(bundles[0], bundles[1]));
-        recipeMerlin = RecipeMerlin.readFromDir(recipeDir, FalconCLI.RecipeOperation.HIVE_DISASTER_RECOVERY)
-            .withRecipeCluster(recipeExecLocation.getRecipeCluster(srcCluster, tgtCluster));
-        recipeMerlin.withSourceCluster(srcCluster)
-            .withTargetCluster(tgtCluster)
-            .withFrequency(new Frequency("5", Frequency.TimeUnit.minutes))
-            .withValidity(TimeUtil.getTimeWrtSystemTime(-5), TimeUtil.getTimeWrtSystemTime(15));
-        recipeMerlin.setUniqueName(this.getClass().getSimpleName());
-
-        connection = cluster.getClusterHelper().getHiveJdbcConnection();
-        runSql(connection, "drop database if exists hdr_sdb1 cascade");
-        runSql(connection, "create database hdr_sdb1");
-        runSql(connection, "use hdr_sdb1");
-
-        connection2 = cluster2.getClusterHelper().getHiveJdbcConnection();
-        runSql(connection2, "drop database if exists hdr_sdb1 cascade");
-        runSql(connection2, "create database hdr_sdb1");
-        runSql(connection2, "use hdr_sdb1");
-    }
-
-    @Test(dataProvider = "getRecipeLocation")
-    public void drPartition(final RecipeExecLocation recipeExecLocation) throws Exception {
-        setUp(recipeExecLocation);
-        final String tblName = "partitionDR";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        runSql(connection,
-            "create table " + tblName + "(comment string) partitioned by (pname string)");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'DELETE') values"
-                + "('this partition is going to be deleted - should NOT appear after dr')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'REPLACE') values"
-                + "('this partition is going to be replaced - should NOT appear after dr')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'ADD_DATA') values"
-                + "('this partition will have more data - should appear after dr')");
-
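-        //bootstrap: one-time baseline copy of the table to the target before incremental DR takes over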
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'NEW_PART') values"
-                + "('this partition has been added post bootstrap - should appear after dr')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'ADD_DATA') values"
-                + "('more data has been added post bootstrap - should appear after dr')");
-        runSql(connection,
-            "alter table " + tblName + " drop partition(pname = 'DELETE')");
-        runSql(connection,
-            "alter table " + tblName + " drop partition(pname = 'REPLACE')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'REPLACE') values"
-                + "('this partition has been replaced - should appear after dr')");
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-
-        ExecResult execResult = cluster.getProcessHelper().getCLIMetrics(recipeMerlin.getName());
-        AssertUtil.assertCLIMetrics(execResult, recipeMerlin.getName(), 1, true);
-    }
-
-    @Test
-    public void drInsertOverwritePartition() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "drInsertOverwritePartition";
-        final String hlpTblName = "drInsertOverwritePartitionHelperTbl";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
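-        //the helper table supplies the rows for the "insert overwrite" statements issued after bootstrap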
-        runSql(connection, "create table " + hlpTblName + "(comment string)");
-        runSql(connection,
-            "insert into table " + hlpTblName
-                + " values('overwrite data - should appear after dr')");
-        runSql(connection,
-            "insert into table " + hlpTblName + " values('newdata row2 - should appear after dr')");
-        runSql(connection,
-            "insert into table " + hlpTblName + " values('newdata row1 - should appear after dr')");
-
-        runSql(connection,
-            "create table " + tblName + "(comment string) partitioned by (pname string)");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'OLD_PART') values"
-                + "('this data should be retained - should appear after dr')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'OVERWRITE_PART') values"
-                + "('this data should get overwritten - should NOT appear after dr')");
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        runSql(connection,
-            "insert overwrite table " + tblName + " partition (pname = 'OVERWRITE_PART') "
-                + "select * from " + hlpTblName + " where comment REGEXP '^overwrite'");
-        runSql(connection,
-            "insert overwrite table " + tblName + " partition (pname = 'NEW_DATA') "
-                + "select * from " + hlpTblName + " where comment REGEXP '^newdata'");
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-    }
-
-    @Test
-    public void drTwoTablesOneRequest() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.TargetCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "firstTableDR";
-        final String tbl2Name = "secondTableDR";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName + ',' + tbl2Name);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        runSql(connection,
-            "create table " + tblName + "(comment string)");
-        runSql(connection,
-            "create table " + tbl2Name + "(comment string)");
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-        bootstrapCopy(connection, clusterFS, tbl2Name, connection2, clusterFS2, tbl2Name);
-
-        runSql(connection,
-            "insert into table " + tblName + " values"
-                + "('this string has been added post bootstrap - should appear after dr')");
-        runSql(connection,
-            "insert into table " + tbl2Name + " values"
-                + "('this string has been added post bootstrap - should appear after dr')");
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        final NotifyingAssert anAssert = new NotifyingAssert(true);
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), anAssert);
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tbl2Name),
-            cluster2, clusterHC2.getTable(DB_NAME, tbl2Name), anAssert);
-        anAssert.assertAll();
-    }
-
-    @Test
-    public void drSerDeWithProperties() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "serdeTable";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        runSql(connection,
-            "create table " + tblName + "(comment string) "
-                + "row format serde 'org.apache.hive.hcatalog.data.JsonSerDe'");
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        runSql(connection,
-            "insert into table " + tblName + " values"
-                + "('this string has been added post bootstrap - should appear after dr')");
-
-        runSql(connection,
-            "ALTER TABLE " + tblName + " SET SERDEPROPERTIES ('someProperty' = 'value')");
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-    }
-
-    @Test
-    public void drChangeColumn() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "tableForColumnChange";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command1 = recipeMerlin.getSubmissionCommand();
-        final String recipe1Name = recipeMerlin.getName();
-        runSql(connection,
-            "create table " + tblName + "(id int)");
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        Assert.assertEquals(Bundle.runFalconCLI(command1), 0, "Recipe submission failed.");
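-        //the column change happens after submission, so the first DR instance should replicate the new schema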
-        runSql(connection,
-            "ALTER TABLE " + tblName + " CHANGE id id STRING COMMENT 'some_comment'");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipe1Name, 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-    }
-
-    @Test
-    public void drTwoDstTablesTwoRequests() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.TargetCluster;
-        setUp(recipeExecLocation);
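-        //this test replicates to a second target, so prepare cluster3 the same way setUp prepares the others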
-        final HCatClient clusterHC3 = cluster3.getClusterHelper().getHCatClient();
-        final Connection connection3 = cluster3.getClusterHelper().getHiveJdbcConnection();
-        runSql(connection3, "drop database if exists hdr_sdb1 cascade");
-        runSql(connection3, "create database hdr_sdb1");
-        runSql(connection3, "use hdr_sdb1");
-
-        final String tblName = "vanillaTable";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final String recipe1Name = recipeMerlin.getName();
-        final List<String> command1 = recipeMerlin.getSubmissionCommand();
-
-        final Bundle bundle3 = new Bundle(BundleUtil.readHCatBundle(), cluster3);
-        bundle3.generateUniqueBundle(this);
-        bundle3.submitClusters(prism);
-        recipeMerlin.withTargetCluster(bundle3.getClusterElement())
-                .withRecipeCluster(recipeExecLocation.getRecipeCluster(
-                        bundles[0].getClusterElement(), bundle3.getClusterElement()));
-        recipeMerlin.setUniqueName(this.getClass().getSimpleName());
-
-        final List<String> command2 = recipeMerlin.getSubmissionCommand();
-        final String recipe2Name = recipeMerlin.getName();
-
-        runSql(connection, "create table " + tblName + "(comment string)");
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-        bootstrapCopy(connection, clusterFS, tblName, connection3, clusterFS3, tblName);
-
-        runSql(connection,
-            "insert into table " + tblName + " values"
-                + "('this string has been added post bootstrap - should appear after dr')");
-
-        Assert.assertEquals(Bundle.runFalconCLI(command1), 0, "Recipe submission failed.");
-        Assert.assertEquals(Bundle.runFalconCLI(command2), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipe1Name, 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC3),
-            recipe2Name, 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        final NotifyingAssert anAssert = new NotifyingAssert(true);
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), anAssert);
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster3, clusterHC3.getTable(DB_NAME, tblName), anAssert);
-        anAssert.assertAll();
-    }
-
-    @Test
-    public void drExternalToNonExternal() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "externalToNonExternal";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
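-        //the source table is external; the target copy is expected to be non-external,
-        //which is what the inequality asserts below verify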
-        createExternalTable(connection, clusterFS, baseTestHDFSDir + "click_data/", tblName);
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        //change column name
-        runSql(connection,
-            "alter table " + tblName + " change column data data_new string");
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        final NotifyingAssert anAssert = new NotifyingAssert(true);
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), anAssert, false);
-        anAssert.assertNotEquals(clusterHC2.getTable(DB_NAME, tblName).getTabletype(),
-            clusterHC.getTable(DB_NAME, tblName).getTabletype(),
-            "Source and destination tables should have different Tabletype");
-        anAssert.assertNotEquals(clusterHC2.getTable(DB_NAME, tblName).getTblProps().get("EXTERNAL"),
-            clusterHC.getTable(DB_NAME, tblName).getTblProps().get("EXTERNAL"),
-            "Source and destination tables should have different value of property EXTERNAL");
-        anAssert.assertAll();
-    }
-
-    @Test
-    public void drExtPartitionedToNonExtPartitioned() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "extPartitionedToNonExtPartitioned";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        createExternalPartitionedTable(connection, clusterFS,
-            baseTestHDFSDir + "click_data/", tblName);
-        runSql(connection2,
-            "create table " + tblName + " (data string, time string) partitioned by (date_ string)");
-        runSql(connection2, "alter table " + tblName + " add partition "
-            + "(date_='2001-01-01') location '" + baseTestHDFSDir + "click_data/2001-01-01/'");
-        runSql(connection2, "alter table " + tblName + " add partition "
-            + "(date_='2001-01-02') location '" + baseTestHDFSDir + "click_data/2001-01-02/'");
-
-        runSql(connection2, "insert into table " + tblName + " partition (date_='2001-01-01') "
-            + "values ('click1', '01:01:01')");
-        runSql(connection2, "insert into table " + tblName + " partition (date_='2001-01-02') "
-            + "values ('click2', '02:02:02')");
-
-        final NotifyingAssert anAssert = new NotifyingAssert(true);
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), anAssert, false);
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        //change column name
-        runSql(connection,
-            "alter table " + tblName + " change column data data_new string");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), anAssert, false);
-        anAssert.assertNotEquals(clusterHC2.getTable(DB_NAME, tblName).getTabletype(),
-            clusterHC.getTable(DB_NAME, tblName).getTabletype(),
-            "Source and destination tables should have different Tabletype");
-        anAssert.assertNotEquals(clusterHC2.getTable(DB_NAME, tblName).getTblProps().get("EXTERNAL"),
-            clusterHC.getTable(DB_NAME, tblName).getTblProps().get("EXTERNAL"),
-            "Source and destination tables should have different value of property EXTERNAL");
-        anAssert.assertAll();
-    }
-
-    /**
-     * One source table, one destination table. Change table properties and the comment at
-     * the source; the changes should be reflected at the destination.
-     */
-    @Test
-    public void drChangeCommentAndPropertyTest() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "myTable";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        runSql(connection, "create table " + tblName + "(field string)");
-        //add new table property
-        runSql(connection,
-            "ALTER TABLE " + tblName + " SET TBLPROPERTIES('someProperty' = 'initialValue')");
-        //set comment
-        runSql(connection,
-            "ALTER TABLE " + tblName + " SET TBLPROPERTIES('comment' = 'this comment will be "
-                + "changed, SHOULD NOT appear')");
-
-        LOGGER.info(tblName + " before bootstrap copy: ");
-        runSql(connection, "describe extended " + tblName);
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        //change table property and comment
-        runSql(connection,
-            "ALTER TABLE " + tblName + " SET TBLPROPERTIES('someProperty' = 'anotherValue')");
-        runSql(connection,
-            "ALTER TABLE " + tblName + " SET TBLPROPERTIES('comment' = 'this comment should "
-                + "appear after replication done')");
-
-        LOGGER.info(tblName + " after modifications, before replication: ");
-        runSql(connection, "describe extended " + tblName);
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-    }
-
-    @Test
-    public void dataGeneration() throws Exception {
-        setUp(RecipeExecLocation.SourceCluster);
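-        //create the same mix of table types on both clusters and check that the Hive assertion utilities agree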
-        runSql(connection, "use hdr_sdb1");
-        createVanillaTable(connection, "store_sales");
-        createSerDeTable(connection);
-        createPartitionedTable(connection);
-        createExternalTable(connection, clusterFS,
-            baseTestHDFSDir + "click_data/", "click_data");
-        createExternalPartitionedTable(connection, clusterFS,
-            baseTestHDFSDir + "click_data2/", "click_data2");
-
-        runSql(connection2, "use hdr_sdb1");
-        createVanillaTable(connection2, "store_sales");
-        createSerDeTable(connection2);
-        createPartitionedTable(connection2);
-        createExternalTable(connection2, clusterFS2,
-            baseTestHDFSDir + "click_data/", "click_data");
-        createExternalPartitionedTable(connection2, clusterFS2,
-            baseTestHDFSDir + "click_data2/", "click_data2");
-
-        final NotifyingAssert anAssert = new NotifyingAssert(true);
-        HiveAssert.assertDbEqual(cluster, clusterHC.getDatabase("hdr_sdb1"),
-            cluster2, clusterHC2.getDatabase("hdr_sdb1"), anAssert);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable("hdr_sdb1", "click_data"),
-            cluster2, clusterHC2.getTable("hdr_sdb1", "click_data"), anAssert);
-        anAssert.assertAll();
-    }
-
-    @Test(enabled = false)
-    public void assertionTest() throws Exception {
-        setUp(RecipeExecLocation.SourceCluster);
-        final SoftAssert anAssert = new SoftAssert();
-        HiveAssert.assertTableEqual(
-            cluster, clusterHC.getTable("default", "hcatsmoke10546"),
-            cluster2, clusterHC2.getTable("default", "hcatsmoke10548"), anAssert);
-        HiveAssert.assertDbEqual(cluster, clusterHC.getDatabase("default"), cluster2,
-            clusterHC2.getDatabase("default"), anAssert);
-        anAssert.assertAll();
-    }
-
-    /**
-     * Creates a table on the first cluster using static partitioning, then creates the same
-     * table on the second cluster using dynamic partitioning, and finally checks that the
-     * two tables are equal.
-     * @throws Exception if table creation or the comparison fails
-     */
-    @Test
-    public void dynamicPartitionsTest() throws Exception {
-        setUp(RecipeExecLocation.SourceCluster);
-        //create table with static partitions on first cluster
-        createPartitionedTable(connection, false);
-
-        //create table with dynamic partitions on second cluster
-        createPartitionedTable(connection2, true);
-
-        //check that both tables are equal
-        HiveAssert.assertTableEqual(
-            cluster, clusterHC.getTable("hdr_sdb1", "global_store_sales"),
-            cluster2, clusterHC2.getTable("hdr_sdb1", "global_store_sales"), new SoftAssert()
-        ).assertAll();
-    }
-
-    /**
-     * One source table, one destination table replication. Insert/delete/replace partitions
-     * using dynamic partition queries. The changes should be reflected at the destination.
-     */
-    @Test
-    public void drInsertDropReplaceDynamicPartition() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "dynamicPartitionDR";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        //disable strict mode so that inserts can rely on dynamic partitions alone
-        runSql(connection, "set hive.exec.dynamic.partition.mode=nonstrict");
-
-        runSql(connection,
-            "create table " + tblName + "(comment string) partitioned by (pname string)");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this partition is going to be deleted - should NOT appear after dr', 'DELETE')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this partition is going to be replaced - should NOT appear after dr', 'REPLACE')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this partition will have more data - should appear after dr', 'ADD_DATA')");
-
-        LOGGER.info(tblName + " before bootstrap copying: ");
-        runSql(connection, "select * from " + tblName);
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this partition has been added post bootstrap - should appear after dr', 'NEW_PART')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('more data has been added post bootstrap - should appear after dr', 'ADD_DATA')");
-        runSql(connection,
-            "alter table " + tblName + " drop partition(pname = 'DELETE')");
-        runSql(connection,
-            "alter table " + tblName + " drop partition(pname = 'REPLACE')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this partition has been replaced - should appear after dr', 'REPLACE')");
-
-        LOGGER.info(tblName + " after modifications, before replication: ");
-        runSql(connection, "select * from " + tblName);
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-    }
-
-    /**
-     * One source table, one destination table replication. Insert/overwrite partitions using
-     * dynamic partition queries. The changes should be reflected at the destination.
-     * @throws Exception
-     */
-    @Test
-    public void drInsertOverwriteDynamicPartition() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "drInsertOverwritePartition";
-        final String hlpTblName = "drInsertOverwritePartitionHelperTbl";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        //disable strict mode so that inserts can rely on dynamic partitions alone
-        runSql(connection, "set hive.exec.dynamic.partition.mode=nonstrict");
-
-        runSql(connection,
-            "create table " + hlpTblName + "(comment string) partitioned by (pname string)");
-        runSql(connection,
-            "insert into table " + hlpTblName + " partition (pname)"
-                + " values('overwrite data - should appear after dr', 'OVERWRITE_PART')");
-        runSql(connection,
-            "insert into table " + hlpTblName + " partition (pname)"
-            + " values('newdata row2 - should appear after dr', 'NEW_DATA')");
-        runSql(connection,
-            "insert into table " + hlpTblName + " partition (pname)"
-                + " values('newdata row1 - should appear after dr', 'NEW_DATA')");
-
-        runSql(connection,
-            "create table " + tblName + "(comment string) partitioned by (pname string)");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this data should be retained - should appear after dr', 'OLD_PART')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this data should get overwritten - should NOT appear after dr', 'OVERWRITE_PART')");
-
-        LOGGER.info(tblName + " before bootstrap copying: ");
-        runSql(connection, "select * from " + tblName);
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        runSql(connection,
-            "insert overwrite table " + tblName + " partition (pname) "
-                + "select comment, pname from " + hlpTblName + " where comment REGEXP '^overwrite'");
-        runSql(connection,
-            "insert overwrite table " + tblName + " partition (pname) "
-                + "select comment, pname from " + hlpTblName + " where comment REGEXP '^newdata'");
-
-        LOGGER.info(tblName + " after modifications, before replication: ");
-        runSql(connection, "select * from " + tblName);
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-    }
-
-    /**
-     * Run the recipe with different frequencies. Submission should go through.
-     * Check the frequency of the launched Oozie job.
-     */
-    @Test(dataProvider = "frequencyGenerator")
-    public void differentRecipeFrequenciesTest(String frequency) throws Exception {
-        setUp(RecipeExecLocation.SourceCluster);
-        LOGGER.info("Testing with frequency: " + frequency);
-        String tblName = "myTable";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName)
-            .withFrequency(new Frequency(frequency));
-        runSql(connection, "create table " + tblName + "(comment string)");
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-        LOGGER.info("Submission went through.");
-
-        InstanceUtil.waitTillInstanceReachState(clusterOC, recipeMerlin.getName(), 1,
-            CoordinatorAction.Status.RUNNING, EntityType.PROCESS);
-        String filter = "name=FALCON_PROCESS_" + recipeMerlin.getName();
-        List<BundleJob> bundleJobs = OozieUtil.getBundles(clusterOC, filter, 0, 10);
-        List<String> bundleIds = OozieUtil.getBundleIds(bundleJobs);
-        String bundleId = OozieUtil.getMaxId(bundleIds);
-        List<CoordinatorJob> coords = clusterOC.getBundleJobInfo(bundleId).getCoordinators();
-        List<String> cIds = new ArrayList<String>();
-        for (CoordinatorJob coord : coords) {
-            cIds.add(coord.getId());
-        }
-        String coordId = OozieUtil.getMinId(cIds);
-        CoordinatorJob job = clusterOC.getCoordJobInfo(coordId);
-        CoordinatorJob.Timeunit timeUnit = job.getTimeUnit();
-        String freq = job.getFrequency();
-        LOGGER.info("Frequency of running job: " + timeUnit + " " + freq);
-        String unit = timeUnit.name().toLowerCase().replace("_", "");
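-        //the coordinator reports an hourly frequency in minutes, so convert back before comparing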
-        if (frequency.contains("hours")) {
-            unit = "hours";
-            freq = String.valueOf(Integer.parseInt(freq) / 60);
-        }
-        Assert.assertTrue(frequency.contains(unit)
-            && frequency.contains(freq), "Running job has different frequency.");
-    }
-
-    @DataProvider(name = "frequencyGenerator")
-    public Object[][] frequencyGenerator() {
-        return new Object[][]{{"minutes(10)"}, {"minutes(10000)"}, {"hours(5)"}, {"hours(5000)"},
-            {"days(3)"}, {"days(3000)"}, {"months(1)"}, {"months(1000)"}, };
-    }
-
-    @AfterMethod(alwaysRun = true)
-    public void tearDown() throws IOException {
-        try {
-            prism.getProcessHelper().deleteByName(recipeMerlin.getName(), null);
-        } catch (Exception e) {
-            LOGGER.info("Deletion of process: " + recipeMerlin.getName() + " failed with exception: " + e);
-        }
-        removeTestClassEntities();
-        cleanTestsDirs();
-    }
-
-}