Posted to commits@falcon.apache.org by pr...@apache.org on 2016/08/26 12:24:53 UTC

[1/2] falcon git commit: FALCON-2127 Falcon regression compilation issue and deleting deprecated recipe test cases

Repository: falcon
Updated Branches:
  refs/heads/master 35b10b438 -> c00975e41


http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveDbDRTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveDbDRTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveDbDRTest.java
deleted file mode 100644
index e281bee..0000000
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveDbDRTest.java
+++ /dev/null
@@ -1,279 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.hive.dr;
-
-import org.apache.falcon.cli.FalconCLI;
-import org.apache.falcon.entity.v0.EntityType;
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.regression.Entities.ClusterMerlin;
-import org.apache.falcon.regression.Entities.RecipeMerlin;
-import org.apache.falcon.regression.core.bundle.Bundle;
-import org.apache.falcon.regression.core.enumsAndConstants.MerlinConstants;
-import org.apache.falcon.regression.core.helpers.ColoHelper;
-import org.apache.falcon.regression.core.supportClasses.NotifyingAssert;
-import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.core.util.Config;
-import org.apache.falcon.regression.core.util.HadoopUtil;
-import org.apache.falcon.regression.core.util.HiveAssert;
-import org.apache.falcon.regression.core.util.InstanceUtil;
-import org.apache.falcon.regression.core.util.MatrixUtil;
-import org.apache.falcon.regression.core.util.TimeUtil;
-import org.apache.falcon.regression.testHelper.BaseTestClass;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hive.hcatalog.api.HCatClient;
-import org.apache.log4j.Logger;
-import org.apache.oozie.client.CoordinatorAction;
-import org.apache.oozie.client.OozieClient;
-import org.testng.Assert;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.util.List;
-
-import static org.apache.falcon.regression.core.util.HiveUtil.runSql;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.bootstrapCopy;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.createVanillaTable;
-
-/**
- * Hive DR Testing for Hive database replication.
- */
-@Test(groups = {"embedded", "multiCluster"})
-public class HiveDbDRTest extends BaseTestClass {
-    private static final Logger LOGGER = Logger.getLogger(HiveDbDRTest.class);
-    private final ColoHelper cluster = servers.get(0);
-    private final ColoHelper cluster2 = servers.get(1);
-    private final FileSystem clusterFS = serverFS.get(0);
-    private final FileSystem clusterFS2 = serverFS.get(1);
-    private final OozieClient clusterOC = serverOC.get(0);
-    private final OozieClient clusterOC2 = serverOC.get(1);
-    private HCatClient clusterHC;
-    private HCatClient clusterHC2;
-    private RecipeMerlin recipeMerlin;
-    private Connection connection;
-    private Connection connection2;
-
-    @DataProvider
-    public Object[][] getRecipeLocation() {
-        return MatrixUtil.crossProduct(RecipeExecLocation.values());
-    }
-
-    private void setUp(RecipeExecLocation recipeExecLocation) throws Exception {
-        clusterHC = cluster.getClusterHelper().getHCatClient();
-        clusterHC2 = cluster2.getClusterHelper().getHCatClient();
-        bundles[0] = new Bundle(BundleUtil.readHCatBundle(), cluster);
-        bundles[1] = new Bundle(BundleUtil.readHCatBundle(), cluster2);
-        bundles[0].generateUniqueBundle(this);
-        bundles[1].generateUniqueBundle(this);
-        final ClusterMerlin srcCluster = bundles[0].getClusterElement();
-        final ClusterMerlin tgtCluster = bundles[1].getClusterElement();
-        Bundle.submitCluster(recipeExecLocation.getRecipeBundle(bundles[0], bundles[1]));
-
-        String recipeDir = "HiveDrRecipe";
-        if (MerlinConstants.IS_SECURE) {
-            recipeDir = "HiveDrSecureRecipe";
-        }
-        recipeMerlin = RecipeMerlin.readFromDir(recipeDir, FalconCLI.RecipeOperation.HIVE_DISASTER_RECOVERY)
-            .withRecipeCluster(recipeExecLocation.getRecipeCluster(srcCluster, tgtCluster));
-        recipeMerlin.withSourceCluster(srcCluster)
-            .withTargetCluster(tgtCluster)
-            .withFrequency(new Frequency("5", Frequency.TimeUnit.minutes))
-            .withValidity(TimeUtil.getTimeWrtSystemTime(-1), TimeUtil.getTimeWrtSystemTime(11));
-        recipeMerlin.setUniqueName(this.getClass().getSimpleName());
-
-        connection = cluster.getClusterHelper().getHiveJdbcConnection();
-
-        connection2 = cluster2.getClusterHelper().getHiveJdbcConnection();
-    }
-
-    private void setUpDb(String dbName, Connection conn) throws SQLException {
-        runSql(conn, "drop database if exists " + dbName + " cascade");
-        runSql(conn, "create database " + dbName);
-        runSql(conn, "use " + dbName);
-    }
-
-    @Test(dataProvider = "getRecipeLocation")
-    public void drDbDropDb(final RecipeExecLocation recipeExecLocation) throws Exception {
-        setUp(recipeExecLocation);
-        final String dbName = "drDbDropDb";
-        setUpDb(dbName, connection);
-        setUpDb(dbName, connection2);
-        recipeMerlin.withSourceDb(dbName).withSourceTable("*");
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        runSql(connection, "drop database " + dbName);
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        final List<String> dstDbs = runSql(connection2, "show databases");
-        Assert.assertFalse(dstDbs.contains(dbName), "dstDbs = " + dstDbs + " was not expected to "
-            + "contain " + dbName);
-    }
-
-
-    @Test(dataProvider = "isDBReplication")
-    public void drDbFailPass(Boolean isDBReplication) throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String dbName = "drDbFailPass";
-        final String tblName = "vanillaTable";
-        final String hiveWarehouseLocation = Config.getProperty("hive.warehouse.location", "/apps/hive/warehouse/");
-        final String dbPath = HadoopUtil.joinPath(hiveWarehouseLocation, dbName.toLowerCase() + ".db");
-        setUpDb(dbName, connection);
-        runSql(connection, "create table " + tblName + "(data string)");
-        setUpDb(dbName, connection2);
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        recipeMerlin.withSourceDb(dbName).withSourceTable(isDBReplication ? "*" : tblName);
-
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        runSql(connection, "insert into table " + tblName + " values('cannot be replicated now')");
-        final String noReadWritePerm = "d---r-xr-x";
-        LOGGER.info("Setting " + clusterFS2.getUri() + dbPath + " to : " + noReadWritePerm);
-        clusterFS2.setPermission(new Path(dbPath), FsPermission.valueOf(noReadWritePerm));
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.KILLED, EntityType.PROCESS);
-
-        final String readWritePerm = "drwxr-xr-x";
-        LOGGER.info("Setting " + clusterFS2.getUri() + dbPath + " to : " + readWritePerm);
-        clusterFS2.setPermission(new Path(dbPath), FsPermission.valueOf(readWritePerm));
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(dbName, tblName),
-            cluster2, clusterHC2.getTable(dbName, tblName), new NotifyingAssert(true)
-        ).assertAll();
-    }
-
-    @Test
-    public void drDbAddDropTable() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String dbName = "drDbAddDropTable";
-        final String tblToBeDropped = "table_to_be_dropped";
-        final String tblToBeDroppedAndAdded = "table_to_be_dropped_and_readded";
-        final String newTableToBeAdded = "new_table_to_be_added";
-
-        setUpDb(dbName, connection);
-        setUpDb(dbName, connection2);
-        recipeMerlin.withSourceDb(dbName).withSourceTable("*")
-            .withFrequency(new Frequency("2", Frequency.TimeUnit.minutes));
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        createVanillaTable(connection, tblToBeDropped);
-        createVanillaTable(connection, tblToBeDroppedAndAdded);
-        bootstrapCopy(connection, clusterFS, tblToBeDropped,
-            connection2, clusterFS2, tblToBeDropped);
-        bootstrapCopy(connection, clusterFS, tblToBeDroppedAndAdded,
-            connection2, clusterFS2, tblToBeDroppedAndAdded);
-
-        /* For first replication - two tables are dropped & one table is added */
-        runSql(connection, "drop table " + tblToBeDropped);
-        runSql(connection, "drop table " + tblToBeDroppedAndAdded);
-        createVanillaTable(connection, newTableToBeAdded);
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        final NotifyingAssert anAssert = new NotifyingAssert(true);
-        HiveAssert.assertDbEqual(cluster, clusterHC.getDatabase(dbName),
-            cluster2, clusterHC2.getDatabase(dbName), anAssert);
-
-        /* For second replication - a dropped table is added back */
-        createVanillaTable(connection, tblToBeDroppedAndAdded);
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 2, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertDbEqual(cluster, clusterHC.getDatabase(dbName),
-            cluster2, clusterHC2.getDatabase(dbName), anAssert);
-        anAssert.assertAll();
-    }
-
-    @Test
-    public void drDbNonReplicatableTable() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String dbName = "drDbNonReplicatableTable";
-        final String tblName = "vanillaTable";
-        final String tblView = "vanillaTableView";
-        final String tblOffline = "offlineTable";
-
-        setUpDb(dbName, connection);
-        setUpDb(dbName, connection2);
-        recipeMerlin.withSourceDb(dbName).withSourceTable("*")
-            .withFrequency(new Frequency("2", Frequency.TimeUnit.minutes));
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        createVanillaTable(connection, tblName);
-        runSql(connection, "create view " + tblView + " as select * from " + tblName);
-        createVanillaTable(connection, tblOffline);
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-        bootstrapCopy(connection, clusterFS, tblOffline, connection2, clusterFS2, tblOffline);
-        final String newComment = "'new comment for offline table should not reach destination'";
-        runSql(connection,
-            "alter table " + tblOffline + " set tblproperties ('comment' =" + newComment +")");
-        runSql(connection, "alter table " + tblOffline + " enable offline");
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        //vanilla table gets replicated, offline table & view are not replicated
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(dbName, tblName),
-            cluster2, clusterHC2.getTable(dbName, tblName), new NotifyingAssert(true)).assertAll();
-        final List<String> dstTables = runSql(connection2, "show tables");
-        Assert.assertFalse(dstTables.contains(tblView),
-            "dstTables = " + dstTables + " was not expected to contain " + tblView);
-        final List<String> dstComment =
-            runSql(connection2, "show tblproperties " + tblOffline + "('comment')");
-        Assert.assertFalse(dstComment.contains(newComment),
-            tblOffline + " comment = " + dstComment + " was not expected to contain " + newComment);
-    }
-
-    @AfterMethod(alwaysRun = true)
-    public void tearDown() throws IOException {
-        try {
-            prism.getProcessHelper().deleteByName(recipeMerlin.getName(), null);
-        } catch (Exception e) {
-            LOGGER.info("Deletion of process: " + recipeMerlin.getName() + " failed with exception: " + e);
-        }
-        removeTestClassEntities();
-        cleanTestsDirs();
-    }
-
-    @DataProvider
-    public Object[][] isDBReplication() {
-        return new Object[][]{{Boolean.TRUE}, {Boolean.FALSE}};
-    }
-}
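
A note on the fail-then-recover technique in the deleted drDbFailPass test above: it revokes read/write access on the target database directory so the in-flight replication instance gets KILLED, then restores the permissions and waits for the retry to SUCCEED. A minimal standalone sketch of that permission flip, assuming a default Hadoop Configuration and a hypothetical warehouse path:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;

    public final class PermissionFlipSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical path; the test derives it from hive.warehouse.location + "<db>.db".
            Path dbPath = new Path("/apps/hive/warehouse/drdbfailpass.db");
            FileSystem fs = FileSystem.get(new Configuration());
            // Revoke read/write so the running replication instance fails.
            fs.setPermission(dbPath, FsPermission.valueOf("d---r-xr-x"));
            // ... wait until the coordinator action reaches KILLED ...
            // Restore access; the retried instance should now succeed.
            fs.setPermission(dbPath, FsPermission.valueOf("drwxr-xr-x"));
        }
    }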

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveObjectCreator.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveObjectCreator.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveObjectCreator.java
deleted file mode 100644
index 9eb389a..0000000
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveObjectCreator.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.hive.dr;
-
-import org.apache.falcon.regression.core.util.HadoopUtil;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.log4j.Logger;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.SQLException;
-
-import static org.apache.falcon.regression.core.util.HadoopUtil.writeDataForHive;
-import static org.apache.falcon.regression.core.util.HiveUtil.runSql;
-
-/**
- * Create Hive tables for testing Hive DR. Note that this is not expected to be used outside
- * of HiveDR tests.
- */
-final class HiveObjectCreator {
-    private static final Logger LOGGER = Logger.getLogger(HiveObjectCreator.class);
-    private static final String HDFS_TMP_DIR = "/tmp/hive_objects/";
-
-    private HiveObjectCreator() {
-        throw new AssertionError("Instantiating utility class...");
-    }
-
-    static void bootstrapCopy(Connection srcConnection, FileSystem srcFs, String srcTable,
-                              Connection dstConnection, FileSystem dstFs, String dstTable) throws Exception {
-        LOGGER.info("Starting bootstrap...");
-        final String dumpPath = HDFS_TMP_DIR + srcTable + "/";
-        HadoopUtil.recreateDir(srcFs, dumpPath);
-        runSqlQuietly(srcConnection, "dfs -chmod -R 777 " + dumpPath);
-        HadoopUtil.deleteDirIfExists(dumpPath, dstFs);
-        runSql(srcConnection, "export table " + srcTable + " to '" + dumpPath + "' FOR REPLICATION('ignore')");
-        FileUtil.copy(srcFs, new Path(dumpPath), dstFs, new Path(dumpPath), false, true, new Configuration());
-        runSqlQuietly(dstConnection, "dfs -chmod -R 777 " + dumpPath);
-        runSql(dstConnection, "import table " + dstTable + " from '" + dumpPath + "'");
-        HadoopUtil.deleteDirIfExists(dumpPath, srcFs);
-        HadoopUtil.deleteDirIfExists(dumpPath, dstFs);
-        LOGGER.info("Finished bootstrap");
-    }
-
-    /* We need to delete it using a hive query as the created directory is owned by hive. */
-    private static void runSqlQuietly(Connection srcConnection, String sql) {
-        try {
-            runSql(srcConnection, sql);
-        } catch (SQLException ex) {
-            LOGGER.info("Exception while hive ql execution: " + ex.getMessage());
-        }
-    }
-
-    /**
-     * Create an external table.
-     * @param connection jdbc connection object to use for issuing queries to hive
-     * @param fs filesystem object to upload the data
-     * @param clickDataLocation location to upload the data to
-     * @throws IOException
-     * @throws SQLException
-     */
-    static void createExternalTable(Connection connection, FileSystem fs, String
-        clickDataLocation, String tableName) throws IOException, SQLException {
-        HadoopUtil.deleteDirIfExists(clickDataLocation, fs);
-        fs.mkdirs(new Path(clickDataLocation));
-        fs.setPermission(new Path(clickDataLocation), FsPermission.getDirDefault());
-        writeDataForHive(fs, clickDataLocation,
-            new StringBuffer("click1").append((char) 0x01).append("01:01:01").append("\n")
-                .append("click2").append((char) 0x01).append("02:02:02"), true);
-        //clusterFS.setPermission(new Path(clickDataPart2), FsPermission.getFileDefault());
-        runSql(connection, "create external table " + tableName
-            + " (data string, time string) "
-            + "location '" + clickDataLocation + "'");
-        runSql(connection, "select * from " + tableName);
-    }
-
-
-    /**
- * Create an external partitioned table.
-     * @param connection jdbc connection object to use for issuing queries to hive
-     * @param fs filesystem object to upload the data
-     * @param clickDataLocation location to upload the data to
-     * @throws IOException
-     * @throws SQLException
-     */
-    static void createExternalPartitionedTable(Connection connection, FileSystem fs, String
-        clickDataLocation, String tableName) throws IOException, SQLException {
-        final String clickDataPart1 = clickDataLocation + "2001-01-01/";
-        final String clickDataPart2 = clickDataLocation + "2001-01-02/";
-        fs.mkdirs(new Path(clickDataLocation));
-        fs.setPermission(new Path(clickDataLocation), FsPermission.getDirDefault());
-        writeDataForHive(fs, clickDataPart1,
-            new StringBuffer("click1").append((char) 0x01).append("01:01:01"), true);
-        writeDataForHive(fs, clickDataPart2,
-            new StringBuffer("click2").append((char) 0x01).append("02:02:02"), true);
-        //clusterFS.setPermission(new Path(clickDataPart2), FsPermission.getFileDefault());
-        runSql(connection, "create external table " + tableName
-            + " (data string, time string) partitioned by (date_ string) "
-            + "location '" + clickDataLocation + "'");
-        runSql(connection, "alter table " + tableName + " add partition "
-            + "(date_='2001-01-01') location '" + clickDataPart1 + "'");
-        runSql(connection, "alter table " + tableName + " add partition "
-            + "(date_='2001-01-02') location '" + clickDataPart2 + "'");
-        runSql(connection, "select * from " + tableName);
-    }
-
-    /**
- * Create a partitioned table.
-     * @param connection jdbc connection object to use for issuing queries to hive
-     * @throws SQLException
-     */
-    static void createPartitionedTable(Connection connection) throws SQLException {
-        runSql(connection, "create table global_store_sales "
-            + "(customer_id string, item_id string, quantity float, price float, time timestamp) "
-            + "partitioned by (country string)");
-        runSql(connection,
-            "insert into table global_store_sales partition (country = 'us') values"
-                + "('c1', 'i1', '1', '1', '2001-01-01 01:01:01')");
-        runSql(connection,
-            "insert into table global_store_sales partition (country = 'uk') values"
-                + "('c2', 'i2', '2', '2', '2001-01-01 01:01:02')");
-        runSql(connection, "select * from global_store_sales");
-    }
-
-    /**
- * Create a plain old table.
-     * @param connection jdbc connection object to use for issuing queries to hive
-     * @param tblName
-     * @throws SQLException
-     */
-    static void createVanillaTable(Connection connection, String tblName) throws SQLException {
-        //vanilla table
-        runSql(connection, "create table " + tblName
-            + "(customer_id string, item_id string, quantity float, price float, time timestamp)");
-        runSql(connection, "insert into table " + tblName + " values "
-            + "('c1', 'i1', '1', '1', '2001-01-01 01:01:01'), "
-            + "('c2', 'i2', '2', '2', '2001-01-01 01:01:02')");
-        runSql(connection, "select * from " + tblName);
-    }
-
-    /**
-     * Create a partitioned table with either dynamic or static partitions.
-     * @param connection jdbc connection object to use for issuing queries to hive
- * @param dynamic whether partitions should be added dynamically or statically
-     * @throws SQLException
-     */
-    static void createPartitionedTable(Connection connection,
-                                       boolean dynamic) throws SQLException {
-        String [][] partitions = {
-            {"us", "Kansas", },
-            {"us", "California", },
-            {"au", "Queensland", },
-            {"au", "Victoria", },
-        };
-        //create table
-        runSql(connection, "drop table global_store_sales");
-        runSql(connection, "create table global_store_sales(customer_id string,"
-            + " item_id string, quantity float, price float, time timestamp) "
-            + "partitioned by (country string, state string)");
-        //provide data
-        String query;
-        if (dynamic) {
-            //disable strict mode so that both partition columns can be used as dynamic partitions
-            runSql(connection, "set hive.exec.dynamic.partition.mode=nonstrict");
-            query = "insert into table global_store_sales partition"
-                + "(country, state) values('c%3$s', 'i%3$s', '%3$s', '%3$s', "
-                + "'2001-01-01 01:01:0%3$s', '%1$s', '%2$s')";
-        } else {
-            query = "insert into table global_store_sales partition"
-                + "(country = '%1$s', state = '%2$s') values('c%3$s', 'i%3$s', '%3$s', '%3$s', "
-                + "'2001-01-01 01:01:0%3$s')";
-        }
-        for (int i = 0; i < partitions.length; i++) {
-            runSql(connection, String.format(query, partitions[i][0], partitions[i][1], i + 1));
-        }
-        runSql(connection, "select * from global_store_sales");
-    }
-
-    static void createSerDeTable(Connection connection) throws SQLException {
-        runSql(connection, "create table store_json "
-            + "(customer_id string, item_id string, quantity float, price float, time timestamp) "
-            + "row format serde 'org.apache.hive.hcatalog.data.JsonSerDe' ");
-        runSql(connection, "insert into table store_json values "
-            + "('c1', 'i1', '1', '1', '2001-01-01 01:01:01'), "
-            + "('c2', 'i2', '2', '2', '2001-01-01 01:01:02')");
-        runSql(connection, "select * from store_json");
-    }
-
-}
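
The deleted bootstrapCopy helper above follows Hive's export/import bootstrap pattern: dump the source table to a staging directory with EXPORT ... FOR REPLICATION, copy the dump to the target cluster, then IMPORT it there. A compressed sketch of the same flow over plain JDBC, with hypothetical connection URLs and table names (the real helper obtains its connections from the cluster helpers):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public final class BootstrapCopySketch {
        public static void main(String[] args) throws Exception {
            String dumpPath = "/tmp/hive_objects/my_table/";
            try (Connection src = DriverManager.getConnection("jdbc:hive2://src-host:10000/default");
                 Connection dst = DriverManager.getConnection("jdbc:hive2://dst-host:10000/default");
                 Statement srcStmt = src.createStatement();
                 Statement dstStmt = dst.createStatement()) {
                // Dump table metadata + data to the staging dir on the source cluster.
                srcStmt.execute("export table my_table to '" + dumpPath + "' for replication('ignore')");
                // ... copy the staging dir across clusters (FileUtil.copy / distcp) ...
                // Materialize the bootstrapped copy on the target.
                dstStmt.execute("import table my_table from '" + dumpPath + "'");
            }
        }
    }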

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/RecipeExecLocation.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/RecipeExecLocation.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/RecipeExecLocation.java
deleted file mode 100644
index a124082..0000000
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/RecipeExecLocation.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.hive.dr;
-
-import org.apache.falcon.regression.Entities.ClusterMerlin;
-import org.apache.falcon.regression.core.bundle.Bundle;
-import org.apache.oozie.client.OozieClient;
-
-/**
- * Enum to represent location of recipe execution.
- */
-enum RecipeExecLocation {
-    SourceCluster {
-        protected OozieClient getRecipeOC(OozieClient srcOC, OozieClient tgtOC) {
-            return srcOC;
-        }
-        protected ClusterMerlin getRecipeCluster(ClusterMerlin srcCM, ClusterMerlin tgtCM) {
-            return srcCM;
-        }
-        protected Bundle getRecipeBundle(Bundle srcBundle, Bundle tgtBundle) {
-            return srcBundle;
-        }
-    },
-    TargetCluster {
-        protected OozieClient getRecipeOC(OozieClient srcOC, OozieClient tgtOC) {
-            return tgtOC;
-        }
-        protected ClusterMerlin getRecipeCluster(ClusterMerlin srcCM, ClusterMerlin tgtCM) {
-            return tgtCM;
-        }
-        protected Bundle getRecipeBundle(Bundle srcBundle, Bundle tgtBundle) {
-            return tgtBundle;
-        }
-    };
-
-    /** Get oozie client for the Oozie that is going to run the recipe.
-     * @param srcOC the oozie client for the source cluster
-     * @param tgtOC the oozie client for the target cluster
-     * @return oozie client for the Oozie that is going to run the recipe
-     */
-    abstract OozieClient getRecipeOC(OozieClient srcOC, OozieClient tgtOC);
-
-    abstract ClusterMerlin getRecipeCluster(ClusterMerlin srcCM, ClusterMerlin tgtCM);
-
-    abstract Bundle getRecipeBundle(Bundle srcBundle, Bundle tgtBundle);
-
-}
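
RecipeExecLocation above is an instance of the strategy-enum idiom: each constant overrides abstract selectors so callers never branch on source vs. target themselves. A generic, self-contained sketch of the same idiom (names are illustrative, not Falcon APIs):

    enum Side {
        SOURCE {
            @Override <T> T pick(T src, T tgt) { return src; }
        },
        TARGET {
            @Override <T> T pick(T src, T tgt) { return tgt; }
        };

        // Each constant supplies its own selection strategy.
        abstract <T> T pick(T src, T tgt);
    }

With this in place a caller writes side.pick(srcOozieClient, tgtOozieClient) instead of an if/else on the enum value.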

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/searchUI/MirrorSourceTargetOptionsTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/searchUI/MirrorSourceTargetOptionsTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/searchUI/MirrorSourceTargetOptionsTest.java
deleted file mode 100644
index 8bec758..0000000
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/searchUI/MirrorSourceTargetOptionsTest.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.searchUI;
-
-import org.apache.falcon.cli.FalconCLI;
-import org.apache.falcon.regression.Entities.ClusterMerlin;
-import org.apache.falcon.regression.core.bundle.Bundle;
-import org.apache.falcon.regression.core.helpers.ColoHelper;
-import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.testHelper.BaseUITestClass;
-import org.apache.falcon.regression.ui.search.LoginPage;
-import org.apache.falcon.regression.ui.search.MirrorWizardPage;
-import org.apache.falcon.regression.ui.search.MirrorWizardPage.Location;
-import org.apache.falcon.regression.ui.search.SearchPage;
-import org.apache.falcon.resource.EntityList;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.util.EnumSet;
-import java.util.Set;
-import java.util.TreeSet;
-
-/** UI tests for mirror creation. */
-@Test(groups = "search-ui")
-public class MirrorSourceTargetOptionsTest extends BaseUITestClass{
-    private final ColoHelper cluster = servers.get(0);
-    private SearchPage searchPage;
-    private MirrorWizardPage mirrorPage;
-    private MirrorWizardPage.ClusterBlock source;
-    private MirrorWizardPage.ClusterBlock target;
-
-    @BeforeClass(alwaysRun = true)
-    public void setup() throws Exception {
-        openBrowser();
-        searchPage = LoginPage.open(getDriver()).doDefaultLogin();
-        bundles[0] = BundleUtil.readELBundle();
-        bundles[0] = new Bundle(bundles[0], cluster);
-        bundles[0].generateUniqueBundle(this);
-        bundles[0].submitClusters(cluster);
-
-    }
-
-    @BeforeMethod(alwaysRun = true)
-    public void refreshMirrorPage() throws Exception {
-        searchPage.refresh();
-        mirrorPage = searchPage.getPageHeader().doCreateMirror();
-        source = mirrorPage.getSourceBlock();
-        target = mirrorPage.getTargetBlock();
-    }
-
-
-    @Test
-    public void testExclusiveWhereToRunJob() {
-        source.selectRunHere();
-        target.selectRunHere();
-        Assert.assertFalse(source.isRunHereSelected(), "'Run job here' shouldn't be selected on Source");
-        Assert.assertTrue(target.isRunHereSelected(), "'Run job here' should be selected on Target");
-
-        source.selectRunHere();
-        Assert.assertTrue(source.isRunHereSelected(), "'Run job here' should be selected on Source");
-        Assert.assertFalse(target.isRunHereSelected(), "'Run job here' shouldn't be selected on Target");
-
-        mirrorPage.setMirrorType(FalconCLI.RecipeOperation.HIVE_DISASTER_RECOVERY);
-
-        target.selectRunHere();
-        Assert.assertFalse(source.isRunHereSelected(), "'Run job here' shouldn't be selected on Source");
-        Assert.assertTrue(target.isRunHereSelected(), "'Run job here' should be selected on Target");
-
-        source.selectRunHere();
-        Assert.assertTrue(source.isRunHereSelected(), "'Run job here' should be selected on Source");
-        Assert.assertFalse(target.isRunHereSelected(), "'Run job here' shouldn't be selected on Target");
-
-        mirrorPage.setMirrorType(FalconCLI.RecipeOperation.HDFS_REPLICATION);
-        source.setLocationType(Location.AZURE);
-        Assert.assertFalse(source.isRunHereAvailable(),
-                "'Run job here' shouldn't be available on source if Source=Azure");
-
-        source.setLocationType(Location.S3);
-        Assert.assertFalse(source.isRunHereAvailable(),
-                "'Run job here' shouldn't be available on source if Source=S3");
-
-        source.setLocationType(Location.HDFS);
-        target.setLocationType(Location.AZURE);
-        Assert.assertFalse(target.isRunHereAvailable(),
-                "'Run job here' shouldn't be available on target if Target=Azure");
-
-        target.setLocationType(Location.S3);
-        Assert.assertFalse(target.isRunHereAvailable(),
-                "'Run job here' shouldn't be available on target if Target=S3");
-
-    }
-
-    @Test
-    public void testExclusiveFSOptions() {
-        source.setLocationType(Location.HDFS);
-        Assert.assertEquals(target.getAvailableLocationTypes(),
-                EnumSet.allOf(Location.class), "All target types should be available if source=HDFS");
-
-
-        source.setLocationType(Location.AZURE);
-        Assert.assertEquals(target.getAvailableLocationTypes(),
-                EnumSet.of(Location.HDFS), "Only HDFS should be available as target if source=Azure");
-
-        source.setLocationType(Location.S3);
-        Assert.assertEquals(target.getAvailableLocationTypes(),
-                EnumSet.of(Location.HDFS), "Only HDFS should be available as target if source=S3");
-
-        source.setLocationType(Location.HDFS);
-        target.setLocationType(Location.HDFS);
-        Assert.assertEquals(target.getAvailableLocationTypes(),
-                EnumSet.allOf(Location.class), "All source types should be available if target=HDFS");
-
-
-        target.setLocationType(Location.AZURE);
-        Assert.assertEquals(source.getAvailableLocationTypes(),
-                EnumSet.of(Location.HDFS), "Only HDFS should be available as source if target=Azure");
-
-        target.setLocationType(Location.S3);
-        Assert.assertEquals(source.getAvailableLocationTypes(),
-                EnumSet.of(Location.HDFS), "Only HDFS should be available as source if target=S3");
-    }
-
-    @Test
-    public void testClustersDropDownList() throws Exception {
-        //add more clusters
-        ClusterMerlin clusterMerlin = bundles[0].getClusterElement();
-        String clusterName = clusterMerlin.getName() + '-';
-        for (int i = 0; i < 5; i++) {
-            clusterMerlin.setName(clusterName + i);
-            prism.getClusterHelper().submitEntity(clusterMerlin.toString());
-        }
-        EntityList result =
-            prism.getClusterHelper().listAllEntities().getEntityList();
-        Assert.assertNotNull(result.getElements(),
-            "There should be more than 5 clusters in result");
-        Set<String> apiClusterNames = new TreeSet<>();
-        for (EntityList.EntityElement element : result.getElements()) {
-            apiClusterNames.add(element.name);
-        }
-
-        //refresh page to get new clusters on UI
-        refreshMirrorPage();
-
-        mirrorPage.setMirrorType(FalconCLI.RecipeOperation.HDFS_REPLICATION);
-        source.setLocationType(Location.HDFS);
-        target.setLocationType(Location.HDFS);
-
-        Assert.assertEquals(source.getAvailableClusters(), apiClusterNames,
-            "Clusters available via API are not the same as on Source for HDFS replication");
-        Assert.assertEquals(target.getAvailableClusters(), apiClusterNames,
-            "Clusters available via API are not the same as on Target for HDFS replication");
-
-        mirrorPage.setMirrorType(FalconCLI.RecipeOperation.HIVE_DISASTER_RECOVERY);
-
-        Assert.assertEquals(source.getAvailableClusters(), apiClusterNames,
-            "Clusters available via API are not the same as on Source for HIVE replication");
-        Assert.assertEquals(target.getAvailableClusters(), apiClusterNames,
-            "Clusters available via API are not the same as on Target for HIVE replication");
-    }
-
-    @Test
-    public void testInvalidValidity() {
-        mirrorPage.setName(bundles[0].getProcessName());
-        mirrorPage.setMirrorType(FalconCLI.RecipeOperation.HDFS_REPLICATION);
-        String baseTestDir = cleanAndGetTestDir();
-        source.setPath(baseTestDir);
-        source.selectCluster(bundles[0].getClusterNames().get(0));
-        target.setPath(baseTestDir);
-        target.selectCluster(bundles[0].getClusterNames().get(0));
-
-        mirrorPage.setStartTime("2010-01-01T02:00Z");
-        mirrorPage.setEndTime("2010-01-01T01:00Z");
-        mirrorPage.next();
-        mirrorPage.save();
-        Assert.assertTrue(mirrorPage.getActiveAlertText().contains("should be before process end"),
-            "Warning about wrong Validity should be present");
-        //check the same through notification bar
-        mirrorPage.getPageHeader().validateNotificationCountAndCheckLast(1, "should be before process end");
-    }
-
-    @AfterClass(alwaysRun = true)
-    public void tearDownClass() {
-        removeTestClassEntities();
-        closeBrowser();
-    }
-
-}
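
The option-compatibility matrix the deleted test checks (a cloud endpoint may only pair with HDFS, while HDFS pairs with anything) reduces to a small EnumSet rule. A hypothetical sketch of that rule, not the actual UI code:

    import java.util.EnumSet;

    public final class LocationRulesSketch {
        enum Location { HDFS, AZURE, S3 }

        static EnumSet<Location> allowedPeers(Location selected) {
            // Only HDFS may pair with a cloud endpoint; HDFS pairs with everything.
            return selected == Location.HDFS
                ? EnumSet.allOf(Location.class)
                : EnumSet.of(Location.HDFS);
        }

        public static void main(String[] args) {
            System.out.println(allowedPeers(Location.S3));   // [HDFS]
            System.out.println(allowedPeers(Location.HDFS)); // [HDFS, AZURE, S3]
        }
    }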

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/searchUI/MirrorSummaryTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/searchUI/MirrorSummaryTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/searchUI/MirrorSummaryTest.java
deleted file mode 100644
index ce014ef..0000000
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/searchUI/MirrorSummaryTest.java
+++ /dev/null
@@ -1,207 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.searchUI;
-
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.entity.v0.process.PolicyType;
-import org.apache.falcon.entity.v0.process.Retry;
-import org.apache.falcon.regression.core.bundle.Bundle;
-import org.apache.falcon.regression.core.helpers.ColoHelper;
-import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.testHelper.BaseUITestClass;
-import org.apache.falcon.regression.ui.search.LoginPage;
-import org.apache.falcon.regression.ui.search.MirrorWizardPage;
-import org.apache.falcon.regression.ui.search.MirrorWizardPage.Summary;
-import org.apache.falcon.regression.ui.search.SearchPage;
-import org.apache.log4j.Logger;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.util.EnumMap;
-import java.util.Map;
-
-
-/** UI tests for mirror creation. */
-@Test(groups = "search-ui")
-public class MirrorSummaryTest extends BaseUITestClass{
-    private static final Logger LOGGER = Logger.getLogger(MirrorSummaryTest.class);
-
-    private final ColoHelper cluster = servers.get(0);
-    private SearchPage searchPage;
-    private MirrorWizardPage mirrorPage;
-    private String baseTestDir = cleanAndGetTestDir();
-    private String start = "2010-01-01T01:00Z";
-    private String end = "2010-01-01T02:00Z";
-    private Map<Summary, String> baseMap;
-
-    @BeforeClass(alwaysRun = true)
-    public void setupClass() throws Exception {
-        baseMap = new EnumMap<>(Summary.class);
-        baseMap.put(Summary.MAX_MAPS, "5");
-        baseMap.put(Summary.MAX_BANDWIDTH, "100");
-        baseMap.put(Summary.ACL_OWNER, LoginPage.UI_DEFAULT_USER);
-        baseMap.put(Summary.ACL_GROUP, "users");
-        baseMap.put(Summary.ACL_PERMISSIONS, "0755");
-        baseMap.put(Summary.RETRY_POLICY, "periodic");
-        baseMap.put(Summary.RETRY_DELAY, "30 minutes");
-        baseMap.put(Summary.RETRY_ATTEMPTS, "3");
-        baseMap.put(Summary.FREQUENCY, "5 minutes");
-        baseMap.put(Summary.SOURCE_PATH, baseTestDir);
-        baseMap.put(Summary.TARGET_PATH, baseTestDir);
-        baseMap.put(Summary.START, start);
-        baseMap.put(Summary.END, end);
-
-        //HDFS is default mirror type
-        baseMap.put(Summary.TYPE, "HDFS");
-        baseMap.put(Summary.TAGS, "_falcon_mirroring_type - HDFS");
-        baseMap.put(Summary.SOURCE_LOCATION, "HDFS");
-        baseMap.put(Summary.TARGET_LOCATION, "HDFS");
-
-        openBrowser();
-        searchPage = LoginPage.open(getDriver()).doDefaultLogin();
-    }
-
-    @BeforeMethod(alwaysRun = true)
-    public void setup() throws Exception {
-        removeTestClassEntities();
-        bundles[0] = BundleUtil.readELBundle();
-        bundles[0] = new Bundle(bundles[0], cluster);
-        bundles[0].generateUniqueBundle(this);
-        bundles[0].submitClusters(cluster);
-        searchPage.refresh();
-        mirrorPage = searchPage.getPageHeader().doCreateMirror();
-        MirrorWizardPage.ClusterBlock source = mirrorPage.getSourceBlock();
-        MirrorWizardPage.ClusterBlock target = mirrorPage.getTargetBlock();
-        String clusterName = bundles[0].getClusterNames().get(0);
-        String mirrorName = bundles[0].getProcessName();
-
-        baseMap.put(Summary.RUN_ON, clusterName);
-        baseMap.put(Summary.NAME, mirrorName);
-        baseMap.put(Summary.SOURCE_CLUSTER, clusterName);
-        baseMap.put(Summary.TARGET_CLUSTER, clusterName);
-
-        mirrorPage.setName(mirrorName);
-
-        source.setPath(baseTestDir);
-        source.selectCluster(clusterName);
-        target.setPath(baseTestDir);
-        target.selectCluster(clusterName);
-
-        mirrorPage.setStartTime(start);
-        mirrorPage.setEndTime(end);
-
-    }
-
-    @Test
-    public void testSummaryDefaultScenario() {
-        mirrorPage.next();
-
-        Map<Summary, String> actualParams = mirrorPage.getSummaryProperties();
-
-
-        LOGGER.info("Actual parameters: " + actualParams);
-        LOGGER.info("Expected parameters: " + baseMap);
-
-        Assert.assertEquals(actualParams, baseMap);
-
-        mirrorPage.save();
-        Assert.assertTrue(mirrorPage.getActiveAlertText().contains("Submit successful"),
-            "Submit should be successful");
-    }
-
-    @Test
-    public void testModificationOnPreviousStep() {
-        mirrorPage.next();
-
-        Map<Summary, String> actualParams = mirrorPage.getSummaryProperties();
-
-        LOGGER.info("Actual parameters: " + actualParams);
-        LOGGER.info("Expected parameters: " + baseMap);
-
-        Assert.assertEquals(actualParams, baseMap);
-
-        mirrorPage.previous();
-
-        String newPath = baseTestDir + "/new";
-        mirrorPage.getTargetBlock().setPath(newPath);
-
-        Map<Summary, String> expectedParams = new EnumMap<>(baseMap);
-        expectedParams.put(Summary.TARGET_PATH, newPath);
-
-        LOGGER.info("Target path set to " + newPath);
-
-        mirrorPage.next();
-
-        Assert.assertEquals(mirrorPage.getSummaryProperties(), expectedParams);
-
-
-    }
-
-
-    @Test
-    public void testAdvancedScenario() {
-
-        mirrorPage.toggleAdvancedOptions();
-        mirrorPage.setHdfsDistCpMaxMaps("9");
-        mirrorPage.setHdfsMaxBandwidth("50");
-        mirrorPage.setAclOwner("somebody");
-        mirrorPage.setAclGroup("somegroup");
-        mirrorPage.setAclPermission("0000");
-        mirrorPage.setFrequency(new Frequency("8", Frequency.TimeUnit.hours));
-        Retry retry = new Retry();
-        retry.setAttempts(8);
-        retry.setPolicy(PolicyType.FINAL);
-        retry.setDelay(new Frequency("13", Frequency.TimeUnit.days));
-        mirrorPage.setRetry(retry);
-
-
-        mirrorPage.next();
-
-        Map<Summary, String> actualParams = mirrorPage.getSummaryProperties();
-        Map<Summary, String> expectedParams = new EnumMap<>(baseMap);
-        expectedParams.put(Summary.ACL_OWNER, "somebody");
-        expectedParams.put(Summary.ACL_GROUP, "somegroup");
-        expectedParams.put(Summary.ACL_PERMISSIONS, "0000");
-        expectedParams.put(Summary.MAX_MAPS, "9");
-        expectedParams.put(Summary.MAX_BANDWIDTH, "50");
-        expectedParams.put(Summary.FREQUENCY, "8 hours");
-        expectedParams.put(Summary.RETRY_ATTEMPTS, "8");
-        expectedParams.put(Summary.RETRY_POLICY, "final");
-        expectedParams.put(Summary.RETRY_DELAY, "13 days");
-
-
-        LOGGER.info("Actual parameters: " + actualParams);
-        LOGGER.info("Expected parameters: " + expectedParams);
-
-        Assert.assertEquals(actualParams, expectedParams);
-
-
-    }
-
-
-    @AfterClass(alwaysRun = true)
-    public void tearDownClass() {
-        removeTestClassEntities();
-        closeBrowser();
-    }
-
-}
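
The deleted summary tests share one pattern worth noting: build an EnumMap baseline of expected wizard properties once, copy it per scenario, and override only the keys that scenario changes before comparing against the page. A minimal sketch with an illustrative enum:

    import java.util.EnumMap;
    import java.util.Map;

    public final class SummaryBaselineSketch {
        enum Summary { NAME, FREQUENCY, RETRY_ATTEMPTS }

        public static void main(String[] args) {
            // Baseline shared by every scenario.
            Map<Summary, String> base = new EnumMap<>(Summary.class);
            base.put(Summary.NAME, "mirror-1");
            base.put(Summary.FREQUENCY, "5 minutes");
            base.put(Summary.RETRY_ATTEMPTS, "3");

            // Per-scenario copy: override only what the scenario changes.
            Map<Summary, String> expected = new EnumMap<>(base);
            expected.put(Summary.RETRY_ATTEMPTS, "8");

            // The real test asserts mirrorPage.getSummaryProperties() equals this map.
            System.out.println(expected);
        }
    }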

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/searchUI/MirrorTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/searchUI/MirrorTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/searchUI/MirrorTest.java
deleted file mode 100644
index a7887da..0000000
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/searchUI/MirrorTest.java
+++ /dev/null
@@ -1,414 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.searchUI;
-
-import org.apache.falcon.cli.FalconCLI;
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.entity.v0.cluster.ClusterLocationType;
-import org.apache.falcon.entity.v0.cluster.Interfacetype;
-import org.apache.falcon.regression.Entities.ClusterMerlin;
-import org.apache.falcon.regression.Entities.ProcessMerlin;
-import org.apache.falcon.regression.Entities.RecipeMerlin;
-import org.apache.falcon.regression.core.bundle.Bundle;
-import org.apache.falcon.regression.core.enumsAndConstants.MerlinConstants;
-import org.apache.falcon.regression.core.helpers.ColoHelper;
-import org.apache.falcon.regression.core.supportClasses.NotifyingAssert;
-import org.apache.falcon.regression.core.util.AssertUtil;
-import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.core.util.HadoopUtil;
-import org.apache.falcon.regression.core.util.TimeUtil;
-import org.apache.falcon.regression.testHelper.BaseUITestClass;
-import org.apache.falcon.regression.ui.search.LoginPage;
-import org.apache.falcon.regression.ui.search.MirrorWizardPage;
-import org.apache.falcon.regression.ui.search.SearchPage;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.security.authentication.client.AuthenticationException;
-import org.apache.hive.hcatalog.api.HCatClient;
-import org.apache.log4j.Logger;
-import org.apache.oozie.client.OozieClient;
-import org.testng.Assert;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.util.Arrays;
-
-/** UI tests for Mirror Setup Wizard. */
-@Test(groups = {"search-ui", "multiCluster"})
-public class MirrorTest extends BaseUITestClass {
-    private static final Logger LOGGER = Logger.getLogger(MirrorTest.class);
-    private final String baseTestDir = cleanAndGetTestDir();
-    private final String hdfsSrcDir = baseTestDir + "/hdfsSrcDir";
-    private final String hdfsTgtDir = baseTestDir + "/hdfsTgtDir";
-    private final String hdfsStrictDir = baseTestDir + "/strictDir";
-    private static final String DB_NAME = "MirrorTest";
-    private static final String DB2_NAME = "MirrorTest2";
-    private static final String TBL1_NAME = "MirrorTable1";
-    private static final String TBL2_NAME = "MirrorTable2";
-    private final ColoHelper cluster = servers.get(0);
-    private final ColoHelper cluster2 = servers.get(1);
-    private final FileSystem clusterFS = serverFS.get(0);
-    private final FileSystem clusterFS2 = serverFS.get(1);
-    private final OozieClient clusterOC = serverOC.get(0);
-    private final OozieClient clusterOC2 = serverOC.get(1);
-    private HCatClient clusterHC;
-    private HCatClient clusterHC2;
-    private RecipeMerlin recipeMerlin;
-    private Connection connection;
-    private Connection connection2;
-    private MirrorWizardPage mirrorPage;
-
-    /**
- * Submit one cluster, 2 feeds and 10 processes with 1 to 10 tags (the 1st process has 1 tag,
- * the 2nd has 2 tags, ..., the 10th has 10 tags).
-     * @throws URISyntaxException
-     * @throws IOException
-     * @throws AuthenticationException
-     * @throws InterruptedException
-     */
-    @BeforeMethod(alwaysRun = true)
-    public void setup() throws Exception {
-        bundles[0] = new Bundle(BundleUtil.readHCatBundle(), cluster);
-        bundles[1] = new Bundle(BundleUtil.readHCatBundle(), cluster2);
-        bundles[0].generateUniqueBundle(this);
-        bundles[1].generateUniqueBundle(this);
-        final ClusterMerlin srcCluster = bundles[0].getClusterElement();
-        final ClusterMerlin tgtCluster = bundles[1].getClusterElement();
-        Bundle.submitCluster(bundles[0], bundles[1]);
-
-        recipeMerlin = RecipeMerlin.readFromDir("HiveDrRecipe",
-            FalconCLI.RecipeOperation.HIVE_DISASTER_RECOVERY)
-            .withRecipeCluster(srcCluster);
-        recipeMerlin.withSourceCluster(srcCluster)
-            .withTargetCluster(tgtCluster)
-            .withFrequency(new Frequency("5", Frequency.TimeUnit.minutes))
-            .withValidity(TimeUtil.getTimeWrtSystemTime(-5), TimeUtil.getTimeWrtSystemTime(5));
-        recipeMerlin.setUniqueName(this.getClass().getSimpleName());
-        recipeMerlin.withSourceDb(DB_NAME);
-        HadoopUtil.recreateDir(clusterFS, hdfsStrictDir);
-        HadoopUtil.recreateDir(clusterFS2, hdfsStrictDir);
-        clusterFS.setPermission(new Path(hdfsStrictDir), FsPermission.valueOf("drwx------"));
-        clusterFS2.setPermission(new Path(hdfsStrictDir), FsPermission.valueOf("drwx------"));
-        openBrowser();
-        SearchPage searchPage = LoginPage.open(getDriver()).doDefaultLogin();
-        mirrorPage = searchPage.getPageHeader().doCreateMirror();
-        mirrorPage.checkPage();
-    }
-
-
-    @AfterMethod(alwaysRun = true)
-    public void tearDown() throws IOException {
-        removeTestClassEntities();
-        closeBrowser();
-    }
-
-    @Test
-    public void testHeader() throws Exception {
-        mirrorPage.getPageHeader().checkHeader();
-    }
-
-    /**
-     * Create DB on source with 1 table.
-     * Select Dataset type as FileSystem. Select source and target as hdfs.
-     * Populate all fields (name, source, target, validity etc.) with correct and existing values.
-     * Click next. Create mirror.
-     * Using get entity definition API check that entity has been created.
-     * @throws Exception
-     */
-    @Test(enabled = false)
-    public void testHdfsDefaultScenario() throws Exception {
-        final ClusterMerlin srcCluster = bundles[0].getClusterElement();
-        final ClusterMerlin tgtCluster = bundles[1].getClusterElement();
-        RecipeMerlin hdfsRecipe = RecipeMerlin.readFromDir("HdfsRecipe",
-            FalconCLI.RecipeOperation.HDFS_REPLICATION)
-            .withRecipeCluster(srcCluster);
-        hdfsRecipe.withSourceCluster(srcCluster)
-            .withTargetCluster(tgtCluster)
-            .withFrequency(new Frequency("5", Frequency.TimeUnit.minutes))
-            .withValidity(TimeUtil.getTimeWrtSystemTime(-5), TimeUtil.getTimeWrtSystemTime(5));
-        hdfsRecipe.setUniqueName(this.getClass().getSimpleName());
-        hdfsRecipe.withSourceDir(hdfsSrcDir).withTargetDir(hdfsTgtDir);
-        hdfsRecipe.setTags(Arrays.asList("key1=val1", "key2=val2", "key3=val3"));
-
-        mirrorPage.applyRecipe(hdfsRecipe, true);
-        mirrorPage.next();
-        mirrorPage.save();
-
-        AssertUtil.assertSucceeded(prism.getProcessHelper().getStatus(
-            createFakeProcessForRecipe(bundles[0].getProcessObject(), recipeMerlin)));
-    }
-
-    /**
-     * Create DB on source with 1 table.
-     * Select Dataset type as Hive.
-     * Populate all fields (name, source, target, validity etc.) with correct and existing values.
-     * Click next. Create mirror.
-     * Using get entity definition API check that entity has been created.
-     * @throws Exception
-     */
-    @Test(dataProvider = "getDbsAndTbls")
-    public void testHiveDefaultScenario(String dbName, String tblName) throws Exception {
-        recipeMerlin.withSourceDb(dbName);
-        recipeMerlin.withSourceTable(tblName);
-        recipeMerlin.setTags(Arrays.asList("key1=val1", "key2=val2", "key3=val3"));
-        mirrorPage.applyRecipe(recipeMerlin, true);
-        mirrorPage.next();
-        mirrorPage.save();
-        AssertUtil.assertSucceeded(prism.getProcessHelper().getStatus(
-            createFakeProcessForRecipe(bundles[0].getProcessObject(), recipeMerlin)));
-    }
-
-    @DataProvider
-    public Object[][] getDbsAndTbls() {
-        return new String[][]{
-            {DB_NAME, ""},
-            {DB_NAME + ',' + DB2_NAME, ""},
-            {DB_NAME, TBL1_NAME + ',' + TBL2_NAME},
-        };
-    }
-
-    /**
-     *  If "send alerts to" is empty on HiveDR UI, default value for drNotificationReceivers property must be "NA".
-     */
-    @Test
-    public void testSendAlertsDefaultValue()
-        throws InterruptedException, IOException, URISyntaxException, AuthenticationException {
-        recipeMerlin.withSourceDb(DB_NAME);
-        recipeMerlin.withSourceTable(TBL1_NAME);
-        mirrorPage.applyRecipe(recipeMerlin, false);
-        mirrorPage.next();
-        mirrorPage.save();
-        ProcessMerlin process = bundles[0].getProcessObject();
-        process.setName(recipeMerlin.getName());
-        process = new ProcessMerlin(cluster.getProcessHelper().getEntityDefinition(process.toString()).getMessage());
-        String drNotificationReceivers = process.getProperty("drNotificationReceivers");
-        Assert.assertTrue(drNotificationReceivers != null && drNotificationReceivers.equals("NA"),
-            "Default value for drNotificationReceivers should be NA.");
-
-        /* In particular, check that in the table replication scenario the UI doesn't pick up the thrift
-           server endpoint in place of the HiveServer2 endpoint. */
-        String expectedUri = recipeMerlin.getTgtCluster().getInterfaceEndpoint(Interfacetype.REGISTRY)
-            .replace("thrift", "hive2").replace("9083", "10000");
-        Assert.assertEquals(process.getProperty("targetHiveServer2Uri"), expectedUri,
-            "Hive server2 end point should be picked by UI.");
-        expectedUri = recipeMerlin.getSrcCluster().getInterfaceEndpoint(Interfacetype.REGISTRY)
-            .replace("thrift", "hive2").replace("9083", "10000");
-        Assert.assertEquals(process.getProperty("sourceHiveServer2Uri"), expectedUri,
-            "Hive server2 end point should be picked by UI.");
-    }
-
-    /**
-     * Test that the Hive DR UI doesn't pick up the thrift server endpoint in place of the HiveServer2 endpoint.
-     * Test that the HDFS target staging path specified on the Hive DR UI isn't assigned to "*".
-     */
-    @Test
-    public void testHDFSTargetStagingPath()
-        throws URISyntaxException, AuthenticationException, InterruptedException, IOException {
-        recipeMerlin.withSourceDb(DB_NAME);
-        mirrorPage.applyRecipe(recipeMerlin, false);
-        mirrorPage.next();
-        mirrorPage.save();
-        ProcessMerlin process = bundles[0].getProcessObject();
-        process.setName(recipeMerlin.getName());
-        process = new ProcessMerlin(cluster.getProcessHelper().getEntityDefinition(process.toString()).getMessage());
-
-        // check that the Hive DR UI doesn't pick up the thrift server endpoint in place of the HiveServer2 endpoint
-        String expectedUri = recipeMerlin.getTgtCluster().getInterfaceEndpoint(Interfacetype.REGISTRY)
-            .replace("thrift", "hive2").replace("9083", "10000");
-        Assert.assertEquals(process.getProperty("targetHiveServer2Uri"), expectedUri,
-            "Hive server2 end point should be picked by UI.");
-        expectedUri = recipeMerlin.getSrcCluster().getInterfaceEndpoint(Interfacetype.REGISTRY)
-            .replace("thrift", "hive2").replace("9083", "10000");
-        Assert.assertEquals(process.getProperty("sourceHiveServer2Uri"), expectedUri,
-            "Hive server2 end point should be picked by UI.");
-
-        //check that the HDFS target staging path specified on the Hive DR UI isn't assigned to "*"
-        Assert.assertFalse(process.getProperty("targetStagingPath").equals("*"),
-            "HDFS target staging path shouldn't be assigned to '*'.");
-    }
-
-    /**
-     * Test recipe with bad acls.
-     * Set owner/group as invalid string (utf-8, special chars, number).
-     * Check that user is not allowed to go to the next step and has been notified with an alert.
-     * Set permissions as a 4-digit number, negative number, string, 000. Check the same.
-     */
-    @Test(enabled = false)
-    public void testInvalidAcl() {
-        recipeMerlin.setTags(Arrays.asList("key1=val1", "key2=val2", "key3=val3"));
-        final String goodAclOwner = MerlinConstants.CURRENT_USER_NAME;
-        final String goodAclGroup = MerlinConstants.CURRENT_USER_GROUP;
-        final String goodAclPerms = "777";
-        mirrorPage.applyRecipe(recipeMerlin, true);
-        NotifyingAssert notifyingAssert = new NotifyingAssert(true);
-        for(String badAclOwner: new String[] {"utf8\u20ACchar", "speci@l", "123"}) {
-            mirrorPage.setAclOwner(badAclOwner);
-            notifyingAssert.assertTrue(mirrorPage.isAclOwnerWarningDisplayed(),
-                "Expecting invalid owner warning to be displayed for bad acl owner: " + badAclOwner);
-            mirrorPage.next(); //should not go through
-            if (mirrorPage.getStepNumber() == 2) {
-                mirrorPage.silentPrevious();
-                mirrorPage.toggleAdvancedOptions();
-            }
-            mirrorPage.setAclOwner(goodAclOwner);
-            notifyingAssert.assertFalse(mirrorPage.isAclOwnerWarningDisplayed(),
-                "Expecting invalid owner warning to not be displayed for good acl owner: " + goodAclOwner);
-        }
-
-        for(String badAclGroup: new String[] {"utf8\u20ACchar", "speci@l", "123"}) {
-            mirrorPage.setAclGroup(badAclGroup);
-            notifyingAssert.assertTrue(mirrorPage.isAclGroupWarningDisplayed(),
-                "Expecting invalid group warning to be displayed for bad acl group: " + badAclGroup);
-            mirrorPage.next(); //should not go through
-            if (mirrorPage.getStepNumber() == 2) {
-                mirrorPage.silentPrevious();
-                mirrorPage.toggleAdvancedOptions();
-            }
-            mirrorPage.setAclGroup(goodAclGroup);
-            notifyingAssert.assertFalse(mirrorPage.isAclGroupWarningDisplayed(),
-                "Expecting invalid group warning to not be displayed for good acl group: " + goodAclGroup);
-        }
-
-        for(String badAclPermission: new String[] {"1234", "-123", "str", "000", "1*", "*1"}) {
-            mirrorPage.setAclPermission(badAclPermission);
-            notifyingAssert.assertTrue(mirrorPage.isAclPermissionWarningDisplayed(),
-                "Expecting invalid permission warning to be displayed for bad acl permission: " + badAclPermission);
-            mirrorPage.next(); //should not go through
-            if (mirrorPage.getStepNumber() == 2) {
-                mirrorPage.silentPrevious();
-                mirrorPage.toggleAdvancedOptions();
-            }
-            mirrorPage.setAclPermission(goodAclPerms); //clear error
-            notifyingAssert.assertFalse(mirrorPage.isAclPermissionWarningDisplayed(),
-                "Expecting invalid permission warning to not be displayed for good acl permission: " + goodAclPerms);
-        }
-        notifyingAssert.assertAll();
-    }
-
-    /**
-     * Select Hive as dataset type.
-     * Set source/target staging paths as path with invalid pattern, digit, empty value, special/utf-8 symbols.
-     * Check that user is not allowed to go to the next step
-     * and has been notified with an alert.
-     */
-    @Test(enabled = false)
-    public void testHiveAdvancedInvalidStaging() {
-        recipeMerlin.withSourceDb(DB_NAME);
-        recipeMerlin.setTags(Arrays.asList("key1=val1", "key2=val2", "key3=val3"));
-        mirrorPage.applyRecipe(recipeMerlin, true);
-        NotifyingAssert notifyingAssert = new NotifyingAssert(true);
-        final String goodSrcStaging = recipeMerlin.getSrcCluster().getLocation(ClusterLocationType.STAGING).getPath();
-        final String goodTgtStaging = recipeMerlin.getTgtCluster().getLocation(ClusterLocationType.STAGING).getPath();
-        final String[] badTestPaths = new String[] {"not_a_path", "", "not/allowed"};
-        for (String path : badTestPaths) {
-            mirrorPage.setSourceStaging(path);
-            //check error
-            mirrorPage.next();
-            if (mirrorPage.getStepNumber() == 2) {
-                notifyingAssert.fail(
-                    "Navigation to page 2 should not be allowed as source staging path is bad: " + path);
-                mirrorPage.silentPrevious();
-                mirrorPage.toggleAdvancedOptions();
-            }
-            mirrorPage.setSourceStaging(goodSrcStaging);
-            //check error disappeared
-        }
-        for (String path : badTestPaths) {
-            mirrorPage.setTargetStaging(path);
-            //check error
-            mirrorPage.next();
-            if (mirrorPage.getStepNumber() == 2) {
-                notifyingAssert.fail(
-                    "Navigation to page 2 should not be allowed as target staging path is bad: " + path);
-                mirrorPage.silentPrevious();
-                mirrorPage.toggleAdvancedOptions();
-            }
-            mirrorPage.setTargetStaging(goodTgtStaging);
-            //check error disappeared
-        }
-        notifyingAssert.assertAll();
-    }
-
-    /**
-     * Select Hive as dataset type.
-     * Set source/target staging paths as path pointing to directories with strict permissions
-     * (another owner, 700 permissions).
-     * Check that user is not allowed to go to the next step and has been notified with an alert.
-     */
-    @Test(enabled = false)
-    public void testHiveAdvancedStagingAcl() throws Exception {
-        recipeMerlin.withSourceDb(DB_NAME);
-        recipeMerlin.setTags(Arrays.asList("key1=val1", "key2=val2", "key3=val3"));
-        mirrorPage.applyRecipe(recipeMerlin, true);
-        NotifyingAssert notifyingAssert = new NotifyingAssert(true);
-        final String goodSrcStaging = recipeMerlin.getSrcCluster().getLocation(ClusterLocationType.STAGING).getPath();
-        final String goodTgtStaging = recipeMerlin.getTgtCluster().getLocation(ClusterLocationType.STAGING).getPath();
-        final String[] badTestPaths = new String[] {"/apps", hdfsStrictDir};
-        for (String path : badTestPaths) {
-            mirrorPage.setSourceStaging(path);
-            //check error
-            mirrorPage.next();
-            if (mirrorPage.getStepNumber() == 2) {
-                notifyingAssert.fail(
-                    "Navigation to page 2 should not be allowed as source staging path is bad: " + path
-                        + " (" + clusterFS.getFileStatus(new Path(path)) + ")");
-
-                mirrorPage.silentPrevious();
-                mirrorPage.toggleAdvancedOptions();
-            }
-            mirrorPage.setSourceStaging(goodSrcStaging);
-            //check error disappeared
-        }
-        for (String path : badTestPaths) {
-            mirrorPage.setTargetStaging(path);
-            //check error
-            mirrorPage.next();
-            if (mirrorPage.getStepNumber() == 2) {
-                notifyingAssert.fail(
-                    "Navigation to page 2 should not be allowed as target staging path is bad: " + path
-                        + " (" + clusterFS.getFileStatus(new Path(path)) + ")");
-                mirrorPage.silentPrevious();
-                mirrorPage.toggleAdvancedOptions();
-            }
-            mirrorPage.setTargetStaging(goodTgtStaging);
-            //check error disappeared
-        }
-        notifyingAssert.assertAll();
-    }
-
-    /**
-     * Hack to work with process corresponding to recipe.
-     * @param processMerlin process merlin to be modified
-     *                      (ideally we want to get rid of this and use recipe to generate a fake process xml)
-     * @param recipe recipe object that needs to be faked
-     * @return xml string of the process with the recipe's name substituted
-     */
-    private String createFakeProcessForRecipe(ProcessMerlin processMerlin, RecipeMerlin recipe) {
-        processMerlin.setName(recipe.getName());
-        return processMerlin.toString();
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/pom.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/pom.xml b/falcon-regression/pom.xml
index b11cd83..1692323 100644
--- a/falcon-regression/pom.xml
+++ b/falcon-regression/pom.xml
@@ -183,6 +183,12 @@
             </dependency>
 
             <dependency>
+                <groupId>org.apache.falcon</groupId>
+                <artifactId>falcon-cli</artifactId>
+                <version>${project.version}</version>
+            </dependency>
+
+            <dependency>
                 <groupId>com.google.code.findbugs</groupId>
                 <artifactId>annotations</artifactId>
                 <version>2.0.1</version>


[2/2] falcon git commit: FALCON-2127 Falcon regression compilation issue and deleting deprecated recipe test cases

Posted by pr...@apache.org.
FALCON-2127 Falcon regression compilation issue and deleting deprecated recipe test cases

This fix deletes the recipe-based test cases since recipes are deprecated and moving to server-side extensions.
https://issues.apache.org/jira/browse/FALCON-634
Also fixes the current falcon regression compilation issues.
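
The compilation fix itself is build wiring: the falcon-cli artifact (home of org.apache.falcon.cli.FalconCLI, presumably still referenced elsewhere in the regression code) is added to the Maven build, as the pom.xml hunks below show. A minimal sketch of the pattern, assuming the parent hunk sits inside the existing <dependencyManagement> section (which matches the version-less declaration added to merlin-core):

    <!-- falcon-regression/pom.xml: pin the version once for all modules. -->
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.apache.falcon</groupId>
                <artifactId>falcon-cli</artifactId>
                <version>${project.version}</version>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <!-- merlin-core/pom.xml: declare the dependency without a version;
         Maven resolves it from the parent's dependencyManagement. -->
    <dependencies>
        <dependency>
            <groupId>org.apache.falcon</groupId>
            <artifactId>falcon-cli</artifactId>
        </dependency>
    </dependencies>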

The following files are deleted:
RecipeMerlin.java
MirrorWizardPage.java
HdfsRecipeTest.java
HiveDRTest.java
HiveDbDRTest.java
HiveObjectCreator.java
RecipeExecLocation.java
MirrorSourceTargetOptionsTest.java
MirrorSummaryTest.java
MirrorTest.java

Author: Murali Ramasami <mr...@hortonworks.com>

Reviewers: "Pragya Mittal <mi...@gmail.com>"

Closes #275 from muraliramasami/master and squashes the following commits:

346866b [Murali Ramasami] FALCON-2127: Fix the falcon regression compilation issue and delete the deprecated recipe test cases
d6719d3 [Murali Ramasami] FALCON-2127: Fix the falcon regression compilation issue and delete the deprecated recipe test cases


Project: http://git-wip-us.apache.org/repos/asf/falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/falcon/commit/c00975e4
Tree: http://git-wip-us.apache.org/repos/asf/falcon/tree/c00975e4
Diff: http://git-wip-us.apache.org/repos/asf/falcon/diff/c00975e4

Branch: refs/heads/master
Commit: c00975e419a4bf0f191d51de1294b3c2e044a83a
Parents: 35b10b4
Author: Murali Ramasami <mr...@hortonworks.com>
Authored: Fri Aug 26 17:54:19 2016 +0530
Committer: Pragya Mittal <mi...@gmail.com>
Committed: Fri Aug 26 17:54:19 2016 +0530

----------------------------------------------------------------------
 falcon-regression/merlin-core/pom.xml           |   5 +
 .../regression/Entities/RecipeMerlin.java       | 366 ---------
 .../regression/ui/search/MirrorWizardPage.java  | 517 -------------
 .../falcon/regression/ui/search/PageHeader.java |  11 -
 .../regression/hive/dr/HdfsRecipeTest.java      | 131 ----
 .../falcon/regression/hive/dr/HiveDRTest.java   | 736 -------------------
 .../falcon/regression/hive/dr/HiveDbDRTest.java | 279 -------
 .../regression/hive/dr/HiveObjectCreator.java   | 208 ------
 .../regression/hive/dr/RecipeExecLocation.java  |  63 --
 .../searchUI/MirrorSourceTargetOptionsTest.java | 206 ------
 .../regression/searchUI/MirrorSummaryTest.java  | 207 ------
 .../falcon/regression/searchUI/MirrorTest.java  | 414 -----------
 falcon-regression/pom.xml                       |   6 +
 13 files changed, 11 insertions(+), 3138 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin-core/pom.xml
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/pom.xml b/falcon-regression/merlin-core/pom.xml
index d0c651d..4695bf1 100644
--- a/falcon-regression/merlin-core/pom.xml
+++ b/falcon-regression/merlin-core/pom.xml
@@ -227,6 +227,11 @@
         </dependency>
 
         <dependency>
+            <groupId>org.apache.falcon</groupId>
+            <artifactId>falcon-cli</artifactId>
+        </dependency>
+
+        <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-exec</artifactId>
         </dependency>

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/RecipeMerlin.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/RecipeMerlin.java b/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/RecipeMerlin.java
deleted file mode 100644
index 9b9cff2..0000000
--- a/falcon-regression/merlin-core/src/main/java/org/apache/falcon/regression/Entities/RecipeMerlin.java
+++ /dev/null
@@ -1,366 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.Entities;
-
-import org.apache.commons.configuration.AbstractFileConfiguration;
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.filefilter.FalseFileFilter;
-import org.apache.commons.io.filefilter.RegexFileFilter;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.falcon.cli.FalconCLI;
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.entity.v0.cluster.Interfacetype;
-import org.apache.falcon.regression.core.enumsAndConstants.MerlinConstants;
-import org.apache.falcon.entity.v0.process.ACL;
-import org.apache.falcon.entity.v0.process.PolicyType;
-import org.apache.falcon.entity.v0.process.Retry;
-import org.apache.falcon.regression.core.util.Config;
-import org.apache.falcon.regression.core.util.OSUtil;
-import org.apache.log4j.Logger;
-import org.testng.Assert;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.UUID;
-
-/** Class for representing a falcon recipe. */
-public final class RecipeMerlin {
-    private static final Logger LOGGER = Logger.getLogger(RecipeMerlin.class);
-    private static final String WORKFLOW_PATH_KEY = "falcon.recipe.workflow.path";
-    private static final String RECIPE_NAME_KEY = "falcon.recipe.name";
-    private static final String WRITE_DIR =
-        Config.getProperty("recipe.location", "/tmp/falcon-recipe");
-
-    private String template;
-    private AbstractFileConfiguration properties;
-    private String workflow;
-    private ClusterMerlin recipeCluster;
-    private ClusterMerlin srcCluster;
-    private ClusterMerlin tgtCluster;
-
-
-    public ClusterMerlin getRecipeCluster() {
-        return recipeCluster;
-    }
-
-    public ClusterMerlin getSrcCluster() {
-        return srcCluster;
-    }
-
-    public ClusterMerlin getTgtCluster() {
-        return tgtCluster;
-    }
-
-    public FalconCLI.RecipeOperation getRecipeOperation() {
-        return recipeOperation;
-    }
-
-    private FalconCLI.RecipeOperation recipeOperation;
-
-    private RecipeMerlin() {
-    }
-
-    public String getName() {
-        return properties.getString(RECIPE_NAME_KEY);
-    }
-
-    public void setUniqueName(String prefix) {
-        properties.setProperty(RECIPE_NAME_KEY, prefix + UUID.randomUUID().toString().split("-")[0]);
-    }
-
-    public String getSourceDir() {
-        return properties.getString("drSourceDir");
-    }
-
-    public RecipeMerlin withSourceDir(final String srcDir) {
-        properties.setProperty("drSourceDir", srcDir);
-        return this;
-    }
-
-    public String getTargetDir() {
-        return properties.getString("drTargetDir");
-    }
-
-    public RecipeMerlin withTargetDir(final String tgtDir) {
-        properties.setProperty("drTargetDir", tgtDir);
-        return this;
-    }
-
-    public String getSourceDb() {
-        return StringUtils.join(properties.getStringArray("sourceDatabase"), ',');
-    }
-
-    public RecipeMerlin withSourceDb(final String srcDatabase) {
-        properties.setProperty("sourceDatabase", srcDatabase);
-        return this;
-    }
-
-    public String getSourceTable() {
-        return StringUtils.join(properties.getStringArray("sourceTable"), ',');
-    }
-
-    public RecipeMerlin withSourceTable(final String srcTable) {
-        properties.setProperty("sourceTable", srcTable);
-        return this;
-    }
-
-    public RecipeMerlin withSourceCluster(ClusterMerlin sourceCluster) {
-        this.srcCluster = sourceCluster;
-        if (recipeOperation == FalconCLI.RecipeOperation.HDFS_REPLICATION) {
-            properties.setProperty("drSourceClusterFS", sourceCluster.getInterfaceEndpoint(Interfacetype.WRITE));
-        } else {
-            properties.setProperty("sourceCluster", sourceCluster.getName());
-            properties.setProperty("sourceMetastoreUri", sourceCluster.getProperty("hive.metastore.uris"));
-            properties.setProperty("sourceHiveServer2Uri", sourceCluster.getProperty("hive.server2.uri"));
-            //properties.setProperty("sourceServicePrincipal",
-            //    sourceCluster.getProperty("hive.metastore.kerberos.principal"));
-            properties.setProperty("sourceStagingPath", sourceCluster.getLocation("staging"));
-            properties.setProperty("sourceNN", sourceCluster.getInterfaceEndpoint(Interfacetype.WRITE));
-            properties.setProperty("sourceRM", sourceCluster.getInterfaceEndpoint(Interfacetype.EXECUTE));
-        }
-        return this;
-    }
-
-    public RecipeMerlin withTargetCluster(ClusterMerlin targetCluster) {
-        this.tgtCluster = targetCluster;
-        if (recipeOperation == FalconCLI.RecipeOperation.HDFS_REPLICATION) {
-            properties.setProperty("drTargetClusterFS", targetCluster.getInterfaceEndpoint(Interfacetype.WRITE));
-        } else {
-            properties.setProperty("targetCluster", targetCluster.getName());
-            properties.setProperty("targetMetastoreUri", targetCluster.getProperty("hive.metastore.uris"));
-            properties.setProperty("targetHiveServer2Uri", targetCluster.getProperty("hive.server2.uri"));
-            //properties.setProperty("targetServicePrincipal",
-            //    targetCluster.getProperty("hive.metastore.kerberos.principal"));
-            properties.setProperty("targetStagingPath", targetCluster.getLocation("staging"));
-            properties.setProperty("targetNN", targetCluster.getInterfaceEndpoint(Interfacetype.WRITE));
-            properties.setProperty("targetRM", targetCluster.getInterfaceEndpoint(Interfacetype.EXECUTE));
-        }
-        return this;
-    }
-
-    public RecipeMerlin withRecipeCluster(ClusterMerlin paramRecipeCluster) {
-        this.recipeCluster = paramRecipeCluster;
-        properties.setProperty("falcon.recipe.cluster.name", paramRecipeCluster.getName());
-        properties.setProperty("falcon.recipe.cluster.hdfs.writeEndPoint",
-            paramRecipeCluster.getInterfaceEndpoint(Interfacetype.WRITE));
-        return this;
-    }
-
-    public RecipeMerlin withValidity(final String start, final String end) {
-        properties.setProperty("falcon.recipe.cluster.validity.start", start);
-        properties.setProperty("falcon.recipe.cluster.validity.end", end);
-        return this;
-    }
-
-    public String getValidityStart() {
-        return properties.getString("falcon.recipe.cluster.validity.start");
-    }
-
-    public String getValidityEnd() {
-        return properties.getString("falcon.recipe.cluster.validity.end");
-    }
-
-    public RecipeMerlin withFrequency(final Frequency frequency) {
-        properties.setProperty("falcon.recipe.process.frequency", frequency.toString());
-        return this;
-    }
-
-    public Frequency getFrequency() {
-        return Frequency.fromString(properties.getString("falcon.recipe.process.frequency"));
-    }
-
-    public String getMaxEvents() {
-        return properties.getString("maxEvents");
-    }
-
-    public String getReplicationMaxMaps() {
-        return properties.getString("replicationMaxMaps");
-    }
-
-    public String getDistCpMaxMaps() {
-        return properties.getString("distcpMaxMaps");
-    }
-
-    public String getMapBandwidth() {
-        return properties.getString("distcpMapBandwidth");
-    }
-
-    public Retry getRetry() {
-        final int retryAttempts = properties.getInt("falcon.recipe.retry.attempts");
-        final String retryDelay = properties.getString("falcon.recipe.retry.delay");
-        final String retryPolicy = properties.getString("falcon.recipe.retry.policy");
-
-        Retry retry = new Retry();
-        retry.setAttempts(retryAttempts);
-        retry.setDelay(Frequency.fromString(retryDelay));
-        retry.setPolicy(PolicyType.fromValue(retryPolicy));
-        return retry;
-    }
-
-    public ACL getAcl() {
-        ACL acl = new ACL();
-        acl.setOwner(properties.getString("falcon.recipe.acl.owner"));
-        acl.setGroup(properties.getString("falcon.recipe.acl.group"));
-        acl.setPermission(properties.getString("falcon.recipe.acl.permission"));
-        return acl;
-    }
-
-
-    /**
-     * Read recipe from a given directory. Expects that the recipe follows these conventions:
-     * <br> 1. properties file will have .properties extension
-     * <br> 2. template file name will end with -template.xml
-     * <br> 3. workflow file name will end with -workflow.xml
-     * @param readPath the location from where recipe will be read
-     * @param recipeOperation operation of this recipe
-     */
-    public static RecipeMerlin readFromDir(final String readPath,
-                                           FalconCLI.RecipeOperation recipeOperation) {
-        Assert.assertTrue(StringUtils.isNotEmpty(readPath), "readPath for recipe can't be empty");
-        Assert.assertNotNull(recipeOperation, "recipe operation can't be null");
-        RecipeMerlin instance = new RecipeMerlin();
-        instance.recipeOperation = recipeOperation;
-        LOGGER.info("Loading recipe from directory: " + readPath);
-        File directory = null;
-        try {
-            directory = new File(RecipeMerlin.class.getResource("/" + readPath).toURI());
-        } catch (URISyntaxException e) {
-            Assert.fail("could not find dir: " + readPath);
-        }
-        final Collection<File> propertiesFiles = FileUtils.listFiles(directory,
-            new RegexFileFilter(".*\\.properties"), FalseFileFilter.INSTANCE);
-        Assert.assertEquals(propertiesFiles.size(), 1,
-            "Expecting only one property file at: " + readPath +" found: " + propertiesFiles);
-        try {
-            instance.properties =
-                new PropertiesConfiguration(propertiesFiles.iterator().next());
-        } catch (ConfigurationException e) {
-            Assert.fail("Couldn't read recipe's properties file because of exception: "
-                + ExceptionUtils.getStackTrace(e));
-        }
-        instance.properties.setFileName(null); //prevent accidental overwrite of template
-        //removing defaults - specific tests need to supply these
-        instance.properties.clearProperty("sourceDatabase");
-        instance.properties.clearProperty("sourceTable");
-        instance.properties.clearProperty("targetDatabase");
-        instance.properties.clearProperty("targetTable");
-        instance.properties.setProperty("falcon.recipe.acl.owner", MerlinConstants.CURRENT_USER_NAME);
-        instance.properties.setProperty("falcon.recipe.acl.group", MerlinConstants.CURRENT_USER_GROUP);
-        instance.properties.setProperty("falcon.recipe.acl.permission", "*");
-
-        final Collection<File> templatesFiles = FileUtils.listFiles(directory,
-            new RegexFileFilter(".*-template\\.xml"), FalseFileFilter.INSTANCE);
-        Assert.assertEquals(templatesFiles.size(), 1,
-            "Expecting only one template file at: " + readPath + " found: " + templatesFiles);
-        try {
-            instance.template =
-                FileUtils.readFileToString(templatesFiles.iterator().next());
-        } catch (IOException e) {
-            Assert.fail("Couldn't read recipe's template file because of exception: "
-                + ExceptionUtils.getStackTrace(e));
-        }
-
-        final Collection<File> workflowFiles = FileUtils.listFiles(directory,
-            new RegexFileFilter(".*-workflow\\.xml"), FalseFileFilter.INSTANCE);
-        Assert.assertEquals(workflowFiles.size(), 1,
-            "Expecting only one workflow file at: " + readPath + " found: " + workflowFiles);
-        try {
-            instance.workflow = FileUtils.readFileToString(workflowFiles.iterator().next());
-        } catch (IOException e) {
-            Assert.fail("Couldn't read recipe's workflow file because of exception: "
-                + ExceptionUtils.getStackTrace(e));
-        }
-        return instance;
-    }
-
-    /**
-     * Write recipe.
-     */
-    private void write() {
-        final String templateFileLocation = OSUtil.concat(WRITE_DIR, getName() + "-template.xml");
-        try {
-            Assert.assertNotNull(templateFileLocation,
-                "Write location for template file is unexpectedly null.");
-            FileUtils.writeStringToFile(new File(templateFileLocation), template);
-        } catch (IOException e) {
-            Assert.fail("Couldn't write recipe's template file because of exception: "
-                + ExceptionUtils.getStackTrace(e));
-        }
-
-        final String workflowFileLocation = OSUtil.concat(WRITE_DIR, getName() + "-workflow.xml");
-        try {
-            Assert.assertNotNull(workflowFileLocation,
-                "Write location for workflow file is unexpectedly null.");
-            FileUtils.writeStringToFile(new File(workflowFileLocation), workflow);
-        } catch (IOException e) {
-            Assert.fail("Couldn't write recipe's workflow file because of exception: "
-                + ExceptionUtils.getStackTrace(e));
-        }
-        properties.setProperty(WORKFLOW_PATH_KEY, workflowFileLocation);
-        properties.setProperty("falcon.recipe.workflow.name", getName() + "-workflow");
-
-        final String propFileLocation = OSUtil.concat(WRITE_DIR, getName() + ".properties");
-        try {
-            Assert.assertNotNull(propFileLocation,
-                "Write location for properties file is unexpectedly null.");
-            properties.save(new File(propFileLocation));
-        } catch (ConfigurationException e) {
-            Assert.fail("Couldn't write recipe's process file because of exception: "
-                + ExceptionUtils.getStackTrace(e));
-        }
-    }
-
-    /**
-     * Get submission command.
-     */
-    public List<String> getSubmissionCommand() {
-        write();
-        final List<String> cmd = new ArrayList<>();
-        Collections.addAll(cmd, "recipe", "-name", getName(),
-            "-operation", recipeOperation.toString());
-        return cmd;
-    }
-
-    /**
-     * Get tags for recipe.
-     */
-    public List<String> getTags() {
-        final String tagsStr = properties.getString("falcon.recipe.tags");
-        if (StringUtils.isEmpty(tagsStr)) {
-            return new ArrayList<>();
-        }
-        return Arrays.asList(tagsStr.split(","));
-    }
-
-    /**
-     * Set tags for recipe.
-     */
-    public void setTags(List<String> tags) {
-        properties.setProperty("falcon.recipe.tags", StringUtils.join(tags, ','));
-    }
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/MirrorWizardPage.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/MirrorWizardPage.java b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/MirrorWizardPage.java
deleted file mode 100644
index f990c92..0000000
--- a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/MirrorWizardPage.java
+++ /dev/null
@@ -1,517 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.ui.search;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.falcon.cli.FalconCLI;
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.entity.v0.cluster.Interfacetype;
-import org.apache.falcon.entity.v0.process.ACL;
-import org.apache.falcon.entity.v0.process.Retry;
-import org.apache.falcon.regression.Entities.ClusterMerlin;
-import org.apache.falcon.regression.Entities.RecipeMerlin;
-import org.apache.falcon.regression.core.util.TimeUtil;
-import org.apache.falcon.regression.core.util.UIAssert;
-import org.apache.log4j.Logger;
-import org.joda.time.DateTime;
-import org.joda.time.format.DateTimeFormat;
-import org.openqa.selenium.By;
-import org.openqa.selenium.WebDriver;
-import org.openqa.selenium.WebElement;
-import org.openqa.selenium.support.FindBy;
-import org.openqa.selenium.support.FindBys;
-
-import java.util.EnumMap;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/** Page object of the Mirror creation page. */
-public class MirrorWizardPage extends AbstractSearchPage {
-    private static final Logger LOGGER = Logger.getLogger(MirrorWizardPage.class);
-    @FindBys({
-        @FindBy(className = "mainUIView"),
-        @FindBy(className = "formPage")
-    })
-    private WebElement mirrorBox;
-
-    public MirrorWizardPage(WebDriver driver) {
-        super(driver);
-    }
-
-    @Override
-    public void checkPage() {
-        UIAssert.assertDisplayed(mirrorBox, "Mirror box");
-    }
-
-
-    public void setName(String name) {
-        clearAndSetByNgModel("UIModel.name", name);
-    }
-
-    public void setTags(List<String> tags) {
-        //TODO add code here
-    }
-
-    public void setMirrorType(FalconCLI.RecipeOperation recipeOperation) {
-        switch (recipeOperation) {
-        case HDFS_REPLICATION:
-            driver.findElement(By.xpath("//button[contains(.,'File System')]")).click();
-            break;
-        case HIVE_DISASTER_RECOVERY:
-            driver.findElement(By.xpath("//button[contains(.,'HIVE')]")).click();
-            break;
-        default:
-            break;
-        }
-    }
-
-
-    public void setHiveReplication(RecipeMerlin recipeMerlin) {
-        if (StringUtils.isNotEmpty(recipeMerlin.getSourceTable())) {
-            clickById("targetHIVETablesRadio");
-            clearAndSetByNgModel("UIModel.source.hiveDatabase", recipeMerlin.getSourceDb());
-            clearAndSetByNgModel("UIModel.source.hiveTables", recipeMerlin.getSourceTable());
-        } else {
-            clickById("targetHIVEDatabaseRadio");
-            clearAndSetByNgModel("UIModel.source.hiveDatabases", recipeMerlin.getSourceDb());
-        }
-    }
-
-
-    public void setStartTime(String validityStartStr) {
-        final DateTime startDate = TimeUtil.oozieDateToDate(validityStartStr);
-
-        clearAndSetByNgModel("UIModel.validity.start", DateTimeFormat.forPattern("MM/dd/yyyy").print(startDate));
-        final WebElement startTimeBox = driver.findElement(By.className("startTimeBox"));
-        final List<WebElement> startHourAndMinute = startTimeBox.findElements(By.tagName("input"));
-        final WebElement hourText = startHourAndMinute.get(0);
-        final WebElement minuteText = startHourAndMinute.get(1);
-        clearAndSet(hourText, DateTimeFormat.forPattern("hh").print(startDate));
-        clearAndSet(minuteText, DateTimeFormat.forPattern("mm").print(startDate));
-        final WebElement amPmButton = startTimeBox.findElement(By.tagName("button"));
-        if (!amPmButton.getText().equals(DateTimeFormat.forPattern("a").print(startDate))) {
-            amPmButton.click();
-        }
-    }
-
-    public void setEndTime(String validityEndStr) {
-        final DateTime validityEnd = TimeUtil.oozieDateToDate(validityEndStr);
-
-        clearAndSetByNgModel("UIModel.validity.end", DateTimeFormat.forPattern("MM/dd/yyyy").print(validityEnd));
-        final WebElement startTimeBox = driver.findElement(By.className("endTimeBox"));
-        final List<WebElement> startHourAndMinute = startTimeBox.findElements(By.tagName("input"));
-        final WebElement hourText = startHourAndMinute.get(0);
-        final WebElement minuteText = startHourAndMinute.get(1);
-        clearAndSet(hourText, DateTimeFormat.forPattern("hh").print(validityEnd));
-        clearAndSet(minuteText, DateTimeFormat.forPattern("mm").print(validityEnd));
-        final WebElement amPmButton = startTimeBox.findElement(By.tagName("button"));
-        if (!amPmButton.getText().equals(DateTimeFormat.forPattern("a").print(validityEnd))) {
-            amPmButton.click();
-        }
-    }
-
-    public void toggleAdvancedOptions() {
-        final WebElement advanceOption = driver.findElement(By.xpath("//h4[contains(.,'Advanced options')]"));
-        advanceOption.click();
-    }
-
-    public void setFrequency(Frequency frequency) {
-        clearAndSetByNgModel("UIModel.frequency.number", frequency.getFrequency());
-        selectNgModelByVisibleText("UIModel.frequency.unit", frequency.getTimeUnit().name().toLowerCase());
-    }
-
-    public void setHdfsDistCpMaxMaps(String distCpMaxMaps) {
-        clearAndSetByNgModel("UIModel.allocation.hdfs.maxMaps", distCpMaxMaps);
-    }
-
-
-    public void setHdfsMaxBandwidth(String maxBandwidth) {
-        clearAndSetByNgModel("UIModel.allocation.hdfs.maxBandwidth", maxBandwidth);
-    }
-
-    public void setHiveDistCpMaxMaps(String distCpMaxMaps) {
-        clearAndSetByNgModel("UIModel.allocation.hive.maxMapsDistcp", distCpMaxMaps);
-    }
-
-
-    public void setHiveReplicationMaxMaps(String replicationMaxMaps) {
-        clearAndSetByNgModel("UIModel.allocation.hive.maxMapsMirror", replicationMaxMaps);
-    }
-
-    public void setMaxEvents(String maxEvents) {
-        clearAndSetByNgModel("UIModel.allocation.hive.maxMapsEvents", maxEvents);
-    }
-
-    public void setHiveMaxBandwidth(String maxBandWidth) {
-        clearAndSetByNgModel("UIModel.allocation.hive.maxBandwidth", maxBandWidth);
-    }
-
-
-    public void setSourceInfo(ClusterMerlin srcCluster) {
-        setSourceStaging(srcCluster.getLocation("staging"));
-        setSourceHiveEndpoint(srcCluster.getInterfaceEndpoint(Interfacetype.REGISTRY));
-    }
-
-    public void setSourceHiveEndpoint(String hiveEndpoint) {
-        clearAndSetByNgModel("UIModel.hiveOptions.source.hiveServerToEndpoint", hiveEndpoint);
-    }
-
-    public void setSourceStaging(String stagingLocation) {
-        clearAndSetByNgModel("UIModel.hiveOptions.source.stagingPath", stagingLocation);
-    }
-
-    public void setTargetInfo(ClusterMerlin tgtCluster) {
-        setTargetStaging(tgtCluster.getLocation("staging"));
-        setTargetHiveEndpoint(tgtCluster.getInterfaceEndpoint(Interfacetype.REGISTRY));
-    }
-
-    public void setTargetHiveEndpoint(String hiveEndPoint) {
-        clearAndSetByNgModel("UIModel.hiveOptions.target.hiveServerToEndpoint", hiveEndPoint);
-    }
-
-    public void setTargetStaging(String stagingLocation) {
-        clearAndSetByNgModel("UIModel.hiveOptions.target.stagingPath", stagingLocation);
-    }
-
-    public void setRetry(Retry retry) {
-        selectNgModelByVisibleText("UIModel.retry.policy", retry.getPolicy().toString().toUpperCase());
-        clearAndSetByNgModel("UIModel.retry.delay.number", retry.getDelay().getFrequency());
-        selectNgModelByVisibleText("UIModel.retry.delay.unit", retry.getDelay().getTimeUnit().name().toLowerCase());
-        clearAndSetByNgModel("UIModel.retry.attempts", String.valueOf(retry.getAttempts()));
-    }
-
-
-    public void setAcl(ACL acl) {
-        setAclOwner(acl.getOwner());
-        setAclGroup(acl.getGroup());
-        setAclPermission(acl.getPermission());
-    }
-
-    public void setAclOwner(String aclOwner) {
-        clearAndSetSlowlyByNgModel("UIModel.acl.owner", aclOwner);
-    }
-
-    public boolean isAclOwnerWarningDisplayed() {
-        final WebElement warning =
-            findElementByNgModel("UIModel.acl.owner").findElement(By.xpath("./following-sibling::*"));
-        waitForAngularToFinish();
-        return warning.isDisplayed();
-    }
-
-    public void setAclGroup(String aclGroup) {
-        clearAndSetSlowlyByNgModel("UIModel.acl.group", aclGroup);
-    }
-
-    public boolean isAclGroupWarningDisplayed() {
-        final WebElement warning =
-            findElementByNgModel("UIModel.acl.group").findElement(By.xpath("./following-sibling::*"));
-        waitForAngularToFinish();
-        return warning.isDisplayed();
-    }
-
-    public void setAclPermission(String aclPermission) {
-        clearAndSetSlowlyByNgModel("UIModel.acl.permissions", aclPermission);
-    }
-
-    public boolean isAclPermissionWarningDisplayed() {
-        final WebElement warning =
-            findElementByNgModel("UIModel.acl.permissions").findElement(By.xpath("./following-sibling::*"));
-        waitForAngularToFinish();
-        return warning.isDisplayed();
-    }
-
-    public void next() {
-        final WebElement nextButton = driver.findElement(By.xpath("//button[contains(.,'Next')]"));
-        nextButton.click();
-    }
-
-    public void previous() {
-        final WebElement prevButton = driver.findElement(By.xpath("//button[contains(.,'Previous')]"));
-        prevButton.click();
-    }
-
-    public void silentPrevious() {
-        try {
-            previous();
-        } catch (Exception ignore) {
-            //ignore
-        }
-    }
-
-    public void cancel() {
-        driver.findElement(By.xpath("//a[contains(.,'Cancel')]"));
-    }
-
-    public void save() {
-        final WebElement saveButton = driver.findElement(By.xpath("//button[contains(.,'Save')]"));
-        UIAssert.assertDisplayed(saveButton, "Save button is not displayed.");
-        saveButton.click();
-        waitForAlert();
-    }
-
-    public ClusterBlock getSourceBlock() {
-        return new ClusterBlock("Source");
-    }
-
-    public ClusterBlock getTargetBlock() {
-        return new ClusterBlock("Target");
-    }
-
-    /**
-     * Populates hive dr UI with parameters from recipe.
-     * @param recipe recipe
-     * @param overwriteDefaults whether to overwrite the HiveDR default values automatically picked up by the UI
-     */
-    public void applyRecipe(RecipeMerlin recipe, boolean overwriteDefaults) {
-        final ClusterMerlin srcCluster = recipe.getSrcCluster();
-        final ClusterMerlin tgtCluster = recipe.getTgtCluster();
-        setName(recipe.getName());
-        setTags(recipe.getTags());
-        setMirrorType(recipe.getRecipeOperation());
-        getSourceBlock().selectCluster(srcCluster.getName());
-        getTargetBlock().selectCluster(tgtCluster.getName());
-        getSourceBlock().selectRunHere();
-        setStartTime(recipe.getValidityStart());
-        setEndTime(recipe.getValidityEnd());
-        toggleAdvancedOptions();
-        switch (recipe.getRecipeOperation()) {
-        case HDFS_REPLICATION:
-            getSourceBlock().setPath(recipe.getSourceDir());
-            getTargetBlock().setPath(recipe.getTargetDir());
-            setHdfsDistCpMaxMaps(recipe.getDistCpMaxMaps());
-            setHdfsMaxBandwidth(recipe.getMapBandwidth());
-            break;
-        case HIVE_DISASTER_RECOVERY:
-            setHiveReplication(recipe);
-            setHiveDistCpMaxMaps(recipe.getDistCpMaxMaps());
-            setHiveReplicationMaxMaps(recipe.getReplicationMaxMaps());
-            setMaxEvents(recipe.getMaxEvents());
-            setHiveMaxBandwidth(recipe.getMapBandwidth());
-            if (overwriteDefaults) {
-                setSourceInfo(recipe.getSrcCluster());
-                setTargetInfo(recipe.getTgtCluster());
-            }
-            break;
-        default:
-            break;
-        }
-        setFrequency(recipe.getFrequency());
-        setRetry(recipe.getRetry());
-        setAcl(recipe.getAcl());
-    }
-
-    public int getStepNumber() {
-        try {
-            driver.findElement(By.xpath("//button[contains(.,'Previous')]"));
-            return 2;
-        } catch (Exception ignore) {
-            //ignore
-        }
-        return 1;
-    }
-
-    public Map<Summary, String> getSummaryProperties() {
-        String formText = driver.findElement(By.id("formSummaryBox")).getText();
-        Map<Summary, String> props = new EnumMap<>(Summary.class);
-        props.put(Summary.NAME, getBetween(formText, "Name", "Type"));
-        props.put(Summary.TYPE, getBetween(formText, "Type", "Tags"));
-        props.put(Summary.TAGS, getBetween(formText, "Tags", "Source"));
-        props.put(Summary.RUN_ON, getBetween(formText, "Run On", "Schedule"));
-        props.put(Summary.START, getBetween(formText, "Start on:", "End on:"));
-        props.put(Summary.END, getBetween(formText, "End on:", "Max Maps"));
-        props.put(Summary.MAX_MAPS, getBetween(formText, "Max Maps", "Max Bandwidth"));
-        props.put(Summary.MAX_BANDWIDTH, getBetween(formText, "Max Bandwidth", "ACL"));
-
-        props.put(Summary.ACL_OWNER, getBetween(formText, "Owner:", "Group:"));
-        props.put(Summary.ACL_GROUP, getBetween(formText, "Group:", "Permissions:"));
-        props.put(Summary.ACL_PERMISSIONS, getBetween(formText, "Permissions:", "Retry"));
-
-        props.put(Summary.RETRY_POLICY, getBetween(formText, "Policy:", "delay:"));
-        props.put(Summary.RETRY_DELAY, getBetween(formText, "delay:", "Attempts:"));
-        props.put(Summary.RETRY_ATTEMPTS, getBetween(formText, "Attempts:", "Frequency"));
-
-        props.put(Summary.FREQUENCY, getBetween(formText, "Frequency", "Previous"));
-
-        String source = getBetween(formText, "Source", "Target");
-        String target = getBetween(formText, "Target", "Run On");
-        if ("HDFS".equals(props.get(Summary.TYPE))) {
-            props.put(Summary.SOURCE_LOCATION, getBetween(source, "Location", "Path"));
-            props.put(Summary.TARGET_LOCATION, getBetween(target, "Location", "Path"));
-            if ("HDFS".equals(props.get(Summary.SOURCE_LOCATION))) {
-                props.put(Summary.SOURCE_CLUSTER, getBetween(source, "^", "Location"));
-                props.put(Summary.SOURCE_PATH, getBetween(source, "Path:", "$"));
-
-            } else {
-                props.put(Summary.SOURCE_PATH, getBetween(source, "Path:", "URL"));
-                props.put(Summary.SOURCE_URL, getBetween(source, "URL:", "$"));
-
-            }
-            if ("HDFS".equals(props.get(Summary.TARGET_LOCATION))) {
-                props.put(Summary.TARGET_CLUSTER, getBetween(target, "^", "Location"));
-                props.put(Summary.TARGET_PATH, getBetween(target, "Path:", "$"));
-
-            } else {
-                props.put(Summary.TARGET_PATH, getBetween(target, "Path:", "URL"));
-                props.put(Summary.TARGET_URL, getBetween(target, "URL:", "$"));
-
-            }
-
-        } else {
-            LOGGER.error("TODO Read info for HIVE replication.");
-        }
-
-
-        return props;
-    }
-
-    /** Parts of the mirror summary. */
-    public enum Summary {
-        NAME,
-        TYPE,
-        TAGS,
-        RUN_ON,
-        START,
-        END,
-        MAX_MAPS,
-        MAX_BANDWIDTH,
-        ACL_OWNER,
-        ACL_GROUP,
-        ACL_PERMISSIONS,
-        RETRY_POLICY,
-        RETRY_DELAY,
-        RETRY_ATTEMPTS,
-        FREQUENCY,
-        SOURCE_LOCATION,
-        SOURCE_PATH,
-        SOURCE_CLUSTER,
-        SOURCE_URL,
-        TARGET_LOCATION,
-        TARGET_PATH,
-        TARGET_CLUSTER,
-        TARGET_URL,
-
-    }
-
-    private static String getBetween(String text, String first, String second) {
-        Pattern pattern = Pattern.compile(".*" + first + "(.+)" + second + ".*", Pattern.DOTALL);
-        Matcher matcher = pattern.matcher(text);
-        if (matcher.find()) {
-            return matcher.group(1).trim();
-        } else {
-            return null;
-        }
-    }
-
-    /**
-     * Block of source or target cluster with parameters.
-     */
-    public final class ClusterBlock {
-        private final WebElement mainBlock;
-        private final WebElement runHereButton;
-        private final String blockType;
-
-        private ClusterBlock(String type) {
-            this.blockType = type;
-            mainBlock = driver.findElement(By.xpath("//h3[contains(.,'" + type + "')]/.."));
-            runHereButton = mainBlock.findElement(By.id("runJobOn" + type + "Radio"));
-        }
-
-        public Set<Location> getAvailableLocationTypes() {
-            List<WebElement> inputs = getLocationBox().findElements(By.xpath(".//input"));
-            Set<Location> result = EnumSet.noneOf(Location.class);
-            for (WebElement input : inputs) {
-                result.add(Location.getByInput(input));
-            }
-            return result;
-        }
-
-        public Location getSelectedLocationType() {
-            WebElement selected = getLocationBox()
-                .findElement(By.xpath("//input[contains(@class,'ng-valid-parse')]"));
-            return Location.getByInput(selected);
-        }
-
-        public void setLocationType(Location type) {
-            getLocationBox().findElement(By.xpath(
-                String.format(".//input[translate(@value,'azures','AZURES')='%s']", type.toString()))).click();
-        }
-
-        public void selectRunHere() {
-            runHereButton.click();
-        }
-
-        public Set<String> getAvailableClusters() {
-            List<WebElement> options = mainBlock.findElements(By.xpath(".//option[not(@disabled)]"));
-            Set<String> clusters = new TreeSet<>();
-            for (WebElement option : options) {
-                clusters.add(option.getText());
-            }
-            return clusters;
-        }
-
-        public void selectCluster(String clusterName) {
-            selectNgModelByVisibleText("UIModel." + blockType.toLowerCase() + ".cluster", clusterName);
-        }
-
-        public void setPath(String path) {
-            final WebElement srcPathElement = getPath();
-            clearAndSet(srcPathElement, path);
-        }
-
-        public boolean isRunHereSelected() {
-            return runHereButton.getAttribute("class").contains("ng-valid-parse");
-        }
-
-        public boolean isRunHereAvailable() {
-            return runHereButton.getAttribute("disabled") == null;
-        }
-
-
-        private WebElement getLocationBox() {
-            return mainBlock.findElement(By.className("locationBox"));
-        }
-
-        private WebElement getPath() {
-            return mainBlock.findElement(By.name(blockType.toLowerCase() + "ClusterPathInput"));
-        }
-
-
-
-    }
-
-    /**
-     * Types of source/target location.
-     */
-    public enum Location {
-        HDFS,
-        AZURE,
-        S3;
-
-        private static Location getByInput(WebElement input) {
-            return Location.valueOf(input.getAttribute("value").trim().toUpperCase());
-        }
-
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/PageHeader.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/PageHeader.java b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/PageHeader.java
index 61d9475..15d27bd 100644
--- a/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/PageHeader.java
+++ b/falcon-regression/merlin/src/main/java/org/apache/falcon/regression/ui/search/PageHeader.java
@@ -166,7 +166,6 @@ public class PageHeader {
             driver.get(oldUrl);
             doCreateProcess();
             driver.get(oldUrl);
-            doCreateMirror();
             driver.get(oldUrl);
         }
         //home button navigation
@@ -232,16 +231,6 @@ public class PageHeader {
         return processPage;
     }
 
-    public MirrorWizardPage doCreateMirror() {
-        UIAssert.assertDisplayed(mirrorCreateButton, "Mirror create button");
-        Assert.assertEquals(mirrorCreateButton.getText(), "Mirror",
-            "Unexpected text on create mirror button");
-        mirrorCreateButton.click();
-        final MirrorWizardPage mirrorPage = PageFactory.initElements(driver, MirrorWizardPage.class);
-        mirrorPage.checkPage();
-        return mirrorPage;
-    }
-
     private List<String> getHomeUrls() {
         List<String> urls = new ArrayList<>();
         String homeUrl = MerlinConstants.PRISM_URL;

http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HdfsRecipeTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HdfsRecipeTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HdfsRecipeTest.java
deleted file mode 100644
index 07996d5..0000000
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HdfsRecipeTest.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.hive.dr;
-
-import org.apache.falcon.cli.FalconCLI;
-import org.apache.falcon.entity.v0.EntityType;
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.regression.Entities.ClusterMerlin;
-import org.apache.falcon.regression.Entities.RecipeMerlin;
-import org.apache.falcon.regression.core.bundle.Bundle;
-import org.apache.falcon.regression.core.helpers.ColoHelper;
-import org.apache.falcon.regression.core.supportClasses.ExecResult;
-import org.apache.falcon.regression.core.util.AssertUtil;
-import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.core.util.HadoopUtil;
-import org.apache.falcon.regression.core.util.InstanceUtil;
-import org.apache.falcon.regression.core.util.MatrixUtil;
-import org.apache.falcon.regression.core.util.OSUtil;
-import org.apache.falcon.regression.core.util.TimeUtil;
-import org.apache.falcon.regression.testHelper.BaseTestClass;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Logger;
-import org.apache.oozie.client.CoordinatorAction;
-import org.apache.oozie.client.OozieClient;
-import org.testng.Assert;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- * Hdfs recipe test.
- */
-@Test(groups = {"embedded", "multiCluster"})
-public class HdfsRecipeTest extends BaseTestClass {
-    private static final Logger LOGGER = Logger.getLogger(HdfsRecipeTest.class);
-    private final ColoHelper cluster = servers.get(0);
-    private final ColoHelper cluster2 = servers.get(1);
-    private final FileSystem clusterFS = serverFS.get(0);
-    private final FileSystem clusterFS2 = serverFS.get(1);
-    private final OozieClient clusterOC = serverOC.get(0);
-    private final OozieClient clusterOC2 = serverOC.get(1);
-    private final String baseTestHDFSDir = cleanAndGetTestDir() + "/HdfsDR";
-    private String sourceDataLocation = baseTestHDFSDir + "/source";
-    private String targetDataLocation = baseTestHDFSDir + "/target";
-    private RecipeMerlin hdfsRecipe;
-
-    @DataProvider
-    public Object[][] getRecipeLocation() {
-        return MatrixUtil.crossProduct(RecipeExecLocation.values());
-    }
-
-    private void setUp(RecipeExecLocation recipeExecLocation) throws Exception {
-        bundles[0] = new Bundle(BundleUtil.readELBundle(), cluster);
-        bundles[1] = new Bundle(BundleUtil.readELBundle(), cluster2);
-        bundles[0].generateUniqueBundle(this);
-        bundles[1].generateUniqueBundle(this);
-        final ClusterMerlin srcCluster = bundles[0].getClusterElement();
-        final ClusterMerlin tgtCluster = bundles[1].getClusterElement();
-        String recipeDir = "HdfsRecipe";
-        Bundle.submitCluster(recipeExecLocation.getRecipeBundle(bundles[0], bundles[1]));
-        hdfsRecipe = RecipeMerlin.readFromDir(recipeDir, FalconCLI.RecipeOperation.HDFS_REPLICATION)
-            .withRecipeCluster(recipeExecLocation.getRecipeCluster(srcCluster, tgtCluster));
-        hdfsRecipe.withSourceCluster(srcCluster)
-            .withTargetCluster(tgtCluster)
-            .withFrequency(new Frequency("5", Frequency.TimeUnit.minutes))
-            .withValidity(TimeUtil.getTimeWrtSystemTime(-5), TimeUtil.getTimeWrtSystemTime(15));
-        hdfsRecipe.setUniqueName(this.getClass().getSimpleName());
-    }
-
-    /**
-     * Test recipe based replication with 1 source and 1 target.
-     */
-    @Test(dataProvider = "getRecipeLocation")
-    public void test1Source1Target(RecipeExecLocation execLocation) throws Exception {
-        setUp(execLocation);
-        hdfsRecipe.withSourceDir(sourceDataLocation).withTargetDir(targetDataLocation);
-        final List<String> command = hdfsRecipe.getSubmissionCommand();
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(execLocation.getRecipeOC(clusterOC, clusterOC2),
-            hdfsRecipe.getName(), 1, CoordinatorAction.Status.WAITING, EntityType.PROCESS);
-
-        HadoopUtil.copyDataToFolder(clusterFS, sourceDataLocation, OSUtil.NORMAL_INPUT);
-
-        InstanceUtil.waitTillInstanceReachState(execLocation.getRecipeOC(clusterOC, clusterOC2),
-            hdfsRecipe.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        //check if data has been replicated correctly
-        List<Path> cluster1ReplicatedData = HadoopUtil
-            .getAllFilesRecursivelyHDFS(clusterFS, new Path(sourceDataLocation));
-        List<Path> cluster2ReplicatedData = HadoopUtil
-            .getAllFilesRecursivelyHDFS(clusterFS2, new Path(targetDataLocation));
-
-        AssertUtil.checkForListSizes(cluster1ReplicatedData, cluster2ReplicatedData);
-
-        //particular check for https://issues.apache.org/jira/browse/FALCON-1643
-        ExecResult execResult = cluster.getProcessHelper().getCLIMetrics(hdfsRecipe.getName());
-        AssertUtil.assertCLIMetrics(execResult, hdfsRecipe.getName(), 1, true);
-    }
-
-    @AfterMethod(alwaysRun = true)
-    public void tearDown() throws IOException {
-        try {
-            prism.getProcessHelper().deleteByName(hdfsRecipe.getName(), null);
-        } catch (Exception e) {
-            LOGGER.info("Deletion of process: " + hdfsRecipe.getName() + " failed with exception: " + e);
-        }
-        removeTestClassEntities();
-        cleanTestsDirs();
-    }
-}

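The core verification in the deleted HdfsRecipeTest was a recursive listing of both directory trees followed by a size comparison (HadoopUtil.getAllFilesRecursivelyHDFS plus AssertUtil.checkForListSizes). A standalone sketch of the same check, assuming placeholder namenode URIs and test paths:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.LocatedFileStatus;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.RemoteIterator;

    import java.io.IOException;
    import java.net.URI;
    import java.util.ArrayList;
    import java.util.List;

    public class HdfsReplicationCheck {

        /** Collect every file under dir, descending into subdirectories. */
        static List<Path> listRecursively(FileSystem fs, Path dir) throws IOException {
            List<Path> files = new ArrayList<>();
            RemoteIterator<LocatedFileStatus> it = fs.listFiles(dir, true);
            while (it.hasNext()) {
                files.add(it.next().getPath());
            }
            return files;
        }

        public static void main(String[] args) throws Exception {
            // Placeholder namenode URIs and paths - substitute real clusters.
            FileSystem srcFs = FileSystem.get(new URI("hdfs://source-nn:8020"), new Configuration());
            FileSystem tgtFs = FileSystem.get(new URI("hdfs://target-nn:8020"), new Configuration());

            List<Path> src = listRecursively(srcFs, new Path("/tmp/HdfsDR/source"));
            List<Path> tgt = listRecursively(tgtFs, new Path("/tmp/HdfsDR/target"));

            // The deleted test asserted equal list sizes once the recipe instance
            // SUCCEEDED; a stricter check could compare relative paths as well.
            if (src.size() != tgt.size()) {
                throw new AssertionError("Replication incomplete: "
                    + src.size() + " source files vs " + tgt.size() + " target files");
            }
        }
    }
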
http://git-wip-us.apache.org/repos/asf/falcon/blob/c00975e4/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveDRTest.java
----------------------------------------------------------------------
diff --git a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveDRTest.java b/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveDRTest.java
deleted file mode 100644
index 7cd71e1..0000000
--- a/falcon-regression/merlin/src/test/java/org/apache/falcon/regression/hive/dr/HiveDRTest.java
+++ /dev/null
@@ -1,736 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.regression.hive.dr;
-
-import org.apache.falcon.cli.FalconCLI;
-import org.apache.falcon.entity.v0.EntityType;
-import org.apache.falcon.entity.v0.Frequency;
-import org.apache.falcon.regression.Entities.ClusterMerlin;
-import org.apache.falcon.regression.Entities.RecipeMerlin;
-import org.apache.falcon.regression.core.bundle.Bundle;
-import org.apache.falcon.regression.core.enumsAndConstants.MerlinConstants;
-import org.apache.falcon.regression.core.helpers.ColoHelper;
-import org.apache.falcon.regression.core.supportClasses.ExecResult;
-import org.apache.falcon.regression.core.util.AssertUtil;
-import org.apache.falcon.regression.core.supportClasses.NotifyingAssert;
-import org.apache.falcon.regression.core.util.BundleUtil;
-import org.apache.falcon.regression.core.util.HiveAssert;
-import org.apache.falcon.regression.core.util.InstanceUtil;
-import org.apache.falcon.regression.core.util.MatrixUtil;
-import org.apache.falcon.regression.core.util.OozieUtil;
-import org.apache.falcon.regression.core.util.TimeUtil;
-import org.apache.falcon.regression.testHelper.BaseTestClass;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hive.hcatalog.api.HCatClient;
-import org.apache.log4j.Logger;
-import org.apache.oozie.client.BundleJob;
-import org.apache.oozie.client.CoordinatorAction;
-import org.apache.oozie.client.CoordinatorJob;
-import org.apache.oozie.client.OozieClient;
-import org.testng.Assert;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-import org.testng.asserts.SoftAssert;
-
-import java.io.IOException;
-import java.sql.Connection;
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.apache.falcon.regression.core.util.HiveUtil.runSql;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.bootstrapCopy;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.createExternalTable;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.createExternalPartitionedTable;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.createPartitionedTable;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.createSerDeTable;
-import static org.apache.falcon.regression.hive.dr.HiveObjectCreator.createVanillaTable;
-
-/**
- * Hive DR Testing.
- */
-@Test(groups = {"embedded", "multiCluster"})
-public class HiveDRTest extends BaseTestClass {
-    private static final Logger LOGGER = Logger.getLogger(HiveDRTest.class);
-    private static final String DB_NAME = "hdr_sdb1";
-    private final ColoHelper cluster = servers.get(0);
-    private final ColoHelper cluster2 = servers.get(1);
-    private final ColoHelper cluster3 = servers.get(2);
-    private final FileSystem clusterFS = serverFS.get(0);
-    private final FileSystem clusterFS2 = serverFS.get(1);
-    private final FileSystem clusterFS3 = serverFS.get(2);
-    private final OozieClient clusterOC = serverOC.get(0);
-    private final OozieClient clusterOC2 = serverOC.get(1);
-    private final OozieClient clusterOC3 = serverOC.get(2);
-    private final String baseTestHDFSDir = cleanAndGetTestDir() + "/HiveDR/";
-    private HCatClient clusterHC;
-    private HCatClient clusterHC2;
-    private RecipeMerlin recipeMerlin;
-    private Connection connection;
-    private Connection connection2;
-
-    @DataProvider
-    public Object[][] getRecipeLocation() {
-        return MatrixUtil.crossProduct(RecipeExecLocation.values());
-    }
-
-    private void setUp(RecipeExecLocation recipeExecLocation) throws Exception {
-        clusterHC = cluster.getClusterHelper().getHCatClient();
-        clusterHC2 = cluster2.getClusterHelper().getHCatClient();
-        bundles[0] = new Bundle(BundleUtil.readHCatBundle(), cluster);
-        bundles[1] = new Bundle(BundleUtil.readHCatBundle(), cluster2);
-        bundles[0].generateUniqueBundle(this);
-        bundles[1].generateUniqueBundle(this);
-        final ClusterMerlin srcCluster = bundles[0].getClusterElement();
-        final ClusterMerlin tgtCluster = bundles[1].getClusterElement();
-        String recipeDir = "HiveDrRecipe";
-        if (MerlinConstants.IS_SECURE) {
-            recipeDir = "HiveDrSecureRecipe";
-        }
-        Bundle.submitCluster(recipeExecLocation.getRecipeBundle(bundles[0], bundles[1]));
-        recipeMerlin = RecipeMerlin.readFromDir(recipeDir, FalconCLI.RecipeOperation.HIVE_DISASTER_RECOVERY)
-            .withRecipeCluster(recipeExecLocation.getRecipeCluster(srcCluster, tgtCluster));
-        recipeMerlin.withSourceCluster(srcCluster)
-            .withTargetCluster(tgtCluster)
-            .withFrequency(new Frequency("5", Frequency.TimeUnit.minutes))
-            .withValidity(TimeUtil.getTimeWrtSystemTime(-5), TimeUtil.getTimeWrtSystemTime(15));
-        recipeMerlin.setUniqueName(this.getClass().getSimpleName());
-
-        connection = cluster.getClusterHelper().getHiveJdbcConnection();
-        runSql(connection, "drop database if exists hdr_sdb1 cascade");
-        runSql(connection, "create database hdr_sdb1");
-        runSql(connection, "use hdr_sdb1");
-
-        connection2 = cluster2.getClusterHelper().getHiveJdbcConnection();
-        runSql(connection2, "drop database if exists hdr_sdb1 cascade");
-        runSql(connection2, "create database hdr_sdb1");
-        runSql(connection2, "use hdr_sdb1");
-    }
-
-    @Test(dataProvider = "getRecipeLocation")
-    public void drPartition(final RecipeExecLocation recipeExecLocation) throws Exception {
-        setUp(recipeExecLocation);
-        final String tblName = "partitionDR";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        runSql(connection,
-            "create table " + tblName + "(comment string) partitioned by (pname string)");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'DELETE') values"
-                + "('this partition is going to be deleted - should NOT appear after dr')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'REPLACE') values"
-                + "('this partition is going to be replaced - should NOT appear after dr')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'ADD_DATA') values"
-                + "('this partition will have more data - should appear after dr')");
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'NEW_PART') values"
-                + "('this partition has been added post bootstrap - should appear after dr')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'ADD_DATA') values"
-                + "('more data has been added post bootstrap - should appear after dr')");
-        runSql(connection,
-            "alter table " + tblName + " drop partition(pname = 'DELETE')");
-        runSql(connection,
-            "alter table " + tblName + " drop partition(pname = 'REPLACE')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'REPLACE') values"
-                + "('this partition has been replaced - should appear after dr')");
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-
-        ExecResult execResult = cluster.getProcessHelper().getCLIMetrics(recipeMerlin.getName());
-        AssertUtil.assertCLIMetrics(execResult, recipeMerlin.getName(), 1, true);
-    }
-
-    @Test
-    public void drInsertOverwritePartition() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "drInsertOverwritePartition";
-        final String hlpTblName = "drInsertOverwritePartitionHelperTbl";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-        runSql(connection, "create table " + hlpTblName + "(comment string)");
-        runSql(connection,
-            "insert into table " + hlpTblName
-                + " values('overwrite data - should appear after dr')");
-        runSql(connection,
-            "insert into table " + hlpTblName + " values('newdata row2 - should appear after dr')");
-        runSql(connection,
-            "insert into table " + hlpTblName + " values('newdata row1 - should appear after dr')");
-
-        runSql(connection,
-            "create table " + tblName + "(comment string) partitioned by (pname string)");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'OLD_PART') values"
-                + "('this data should be retained - should appear after dr')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname = 'OVERWRITE_PART') values"
-                + "('this data should get overwritten - should NOT appear after dr')");
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        runSql(connection,
-            "insert overwrite table " + tblName + " partition (pname = 'OVERWRITE_PART') "
-                + "select * from " + hlpTblName + " where comment REGEXP '^overwrite'");
-        runSql(connection,
-            "insert overwrite table " + tblName + " partition (pname = 'NEW_DATA') "
-                + "select * from " + hlpTblName + " where comment REGEXP '^newdata'");
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-    }
-
-    @Test
-    public void drTwoTablesOneRequest() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.TargetCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "firstTableDR";
-        final String tbl2Name = "secondTableDR";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName + ',' + tbl2Name);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        runSql(connection,
-            "create table " + tblName + "(comment string)");
-        runSql(connection,
-            "create table " + tbl2Name + "(comment string)");
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-        bootstrapCopy(connection, clusterFS, tbl2Name, connection2, clusterFS2, tbl2Name);
-
-        runSql(connection,
-            "insert into table " + tblName + " values"
-                + "('this string has been added post bootstrap - should appear after dr')");
-        runSql(connection,
-            "insert into table " + tbl2Name + " values"
-                + "('this string has been added post bootstrap - should appear after dr')");
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        final NotifyingAssert anAssert = new NotifyingAssert(true);
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), anAssert);
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tbl2Name),
-            cluster2, clusterHC2.getTable(DB_NAME, tbl2Name), anAssert);
-        anAssert.assertAll();
-
-    }
-
-    @Test
-    public void drSerDeWithProperties() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "serdeTable";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        runSql(connection,
-            "create table " + tblName + "(comment string) "
-                + "row format serde 'org.apache.hive.hcatalog.data.JsonSerDe'");
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        runSql(connection,
-            "insert into table " + tblName + " values"
-                + "('this string has been added post bootstrap - should appear after dr')");
-
-        runSql(connection,
-            "ALTER TABLE " + tblName + " SET SERDEPROPERTIES ('someProperty' = 'value')");
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-
-    }
-
-    @Test
-    public void drChangeColumn() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "tableForColumnChange";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command1 = recipeMerlin.getSubmissionCommand();
-        final String recipe1Name = recipeMerlin.getName();
-        runSql(connection,
-            "create table " + tblName + "(id int)");
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        Assert.assertEquals(Bundle.runFalconCLI(command1), 0, "Recipe submission failed.");
-        runSql(connection,
-            "ALTER TABLE " + tblName + " CHANGE id id STRING COMMENT 'some_comment'");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipe1Name, 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-    }
-
-
-    @Test
-    public void drTwoDstTablesTwoRequests() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.TargetCluster;
-        setUp(recipeExecLocation);
-        final HCatClient clusterHC3 = cluster3.getClusterHelper().getHCatClient();
-        final Connection connection3 = cluster3.getClusterHelper().getHiveJdbcConnection();
-        runSql(connection3, "drop database if exists hdr_sdb1 cascade");
-        runSql(connection3, "create database hdr_sdb1");
-        runSql(connection3, "use hdr_sdb1");
-
-        final String tblName = "vanillaTable";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final String recipe1Name = recipeMerlin.getName();
-        final List<String> command1 = recipeMerlin.getSubmissionCommand();
-
-        final Bundle bundle3 = new Bundle(BundleUtil.readHCatBundle(), cluster3);
-        bundle3.generateUniqueBundle(this);
-        bundle3.submitClusters(prism);
-        recipeMerlin.withTargetCluster(bundle3.getClusterElement())
-                .withRecipeCluster(recipeExecLocation.getRecipeCluster(
-                        bundles[0].getClusterElement(), bundle3.getClusterElement()));
-        recipeMerlin.setUniqueName(this.getClass().getSimpleName());
-
-        final List<String> command2 = recipeMerlin.getSubmissionCommand();
-        final String recipe2Name = recipeMerlin.getName();
-
-        runSql(connection, "create table " + tblName + "(comment string)");
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-        bootstrapCopy(connection, clusterFS, tblName, connection3, clusterFS3, tblName);
-
-        runSql(connection,
-            "insert into table " + tblName + " values"
-                + "('this string has been added post bootstrap - should appear after dr')");
-
-        Assert.assertEquals(Bundle.runFalconCLI(command1), 0, "Recipe submission failed.");
-        Assert.assertEquals(Bundle.runFalconCLI(command2), 0, "Recipe submission failed.");
-
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipe1Name, 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC3),
-            recipe2Name, 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        final NotifyingAssert anAssert = new NotifyingAssert(true);
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), anAssert);
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster3, clusterHC3.getTable(DB_NAME, tblName), anAssert);
-        anAssert.assertAll();
-    }
-
-    @Test
-    public void drExternalToNonExternal() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "externalToNonExternal";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        createExternalTable(connection, clusterFS, baseTestHDFSDir + "click_data/", tblName);
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        //change column name
-        runSql(connection,
-            "alter table " + tblName + " change column data data_new string");
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        final NotifyingAssert anAssert = new NotifyingAssert(true);
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), anAssert, false);
-        anAssert.assertNotEquals(clusterHC2.getTable(DB_NAME, tblName).getTabletype(),
-            clusterHC.getTable(DB_NAME, tblName).getTabletype(),
-            "Source and destination tables should have different Tabletype");
-        anAssert.assertNotEquals(clusterHC2.getTable(DB_NAME, tblName).getTblProps().get("EXTERNAL"),
-            clusterHC.getTable(DB_NAME, tblName).getTblProps().get("EXTERNAL"),
-            "Source and destination tables should have different value of property EXTERNAL");
-        anAssert.assertAll();
-    }
-
-    @Test
-    public void drExtPartitionedToNonExtPartitioned() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "extPartitionedToNonExtPartitioned";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        createExternalPartitionedTable(connection, clusterFS,
-            baseTestHDFSDir + "click_data/", tblName);
-        runSql(connection2,
-            "create table " + tblName + " (data string, time string) partitioned by (date_ string)");
-        runSql(connection2, "alter table " + tblName + " add partition "
-            + "(date_='2001-01-01') location '" + baseTestHDFSDir + "click_data/2001-01-01/'");
-        runSql(connection2, "alter table " + tblName + " add partition "
-            + "(date_='2001-01-02') location '" + baseTestHDFSDir + "click_data/2001-01-02/'");
-
-        runSql(connection2, "insert into table " + tblName + " partition (date_='2001-01-01') "
-            + "values ('click1', '01:01:01')");
-        runSql(connection2, "insert into table " + tblName + " partition (date_='2001-01-02') "
-            + "values ('click2', '02:02:02')");
-
-        final NotifyingAssert anAssert = new NotifyingAssert(true);
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), anAssert, false);
-
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        //change column name
-        runSql(connection,
-            "alter table " + tblName + " change column data data_new string");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), anAssert, false);
-        anAssert.assertNotEquals(clusterHC2.getTable(DB_NAME, tblName).getTabletype(),
-            clusterHC.getTable(DB_NAME, tblName).getTabletype(),
-            "Source and destination tables should have different Tabletype");
-        anAssert.assertNotEquals(clusterHC2.getTable(DB_NAME, tblName).getTblProps().get("EXTERNAL"),
-            clusterHC.getTable(DB_NAME, tblName).getTblProps().get("EXTERNAL"),
-            "Source and destination tables should have different value of property EXTERNAL");
-        anAssert.assertAll();
-    }
-
-    /**
-     * 1 src tbl 1 dst tbl. Change table properties and comment at the source.
-     * Changes should get reflected at the destination.
-     */
-    @Test
-    public void drChangeCommentAndPropertyTest() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "myTable";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        runSql(connection, "create table " + tblName + "(field string)");
-        //add new table property
-        runSql(connection,
-            "ALTER TABLE " + tblName + " SET TBLPROPERTIES('someProperty' = 'initialValue')");
-        //set comment
-        runSql(connection,
-            "ALTER TABLE " + tblName + " SET TBLPROPERTIES('comment' = 'this comment will be "
-                + "changed, SHOULD NOT appear')");
-
-        LOGGER.info(tblName + " before bootstrap copy: ");
-        runSql(connection, "describe extended " + tblName);
-
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        //change table property and comment
-        runSql(connection,
-            "ALTER TABLE " + tblName + " SET TBLPROPERTIES('someProperty' = 'anotherValue')");
-        runSql(connection,
-            "ALTER TABLE " + tblName + " SET TBLPROPERTIES('comment' = 'this comment should "
-                + "appear after replication done')");
-
-        LOGGER.info(tblName + " after modifications, before replication: ");
-        runSql(connection, "describe extended " + tblName);
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-    }
-
-    @Test
-    public void dataGeneration() throws Exception {
-        setUp(RecipeExecLocation.SourceCluster);
-        runSql(connection, "use hdr_sdb1");
-        createVanillaTable(connection, "store_sales");
-        createSerDeTable(connection);
-        createPartitionedTable(connection);
-        createExternalTable(connection, clusterFS,
-            baseTestHDFSDir + "click_data/", "click_data");
-        createExternalPartitionedTable(connection, clusterFS,
-            baseTestHDFSDir + "click_data2/", "click_data2");
-
-        runSql(connection2, "use hdr_sdb1");
-        createVanillaTable(connection2, "store_sales");
-        createSerDeTable(connection2);
-        createPartitionedTable(connection2);
-        createExternalTable(connection2, clusterFS2,
-            baseTestHDFSDir + "click_data/", "click_data");
-        createExternalPartitionedTable(connection2, clusterFS2,
-            baseTestHDFSDir + "click_data2/", "click_data2");
-
-        final NotifyingAssert anAssert = new NotifyingAssert(true);
-        HiveAssert.assertDbEqual(cluster, clusterHC.getDatabase("hdr_sdb1"),
-            cluster2, clusterHC2.getDatabase("hdr_sdb1"), anAssert);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable("hdr_sdb1", "click_data"),
-            cluster2, clusterHC2.getTable("hdr_sdb1", "click_data"), anAssert);
-        anAssert.assertAll();
-
-    }
-
-    @Test(enabled = false)
-    public void assertionTest() throws Exception {
-        setUp(RecipeExecLocation.SourceCluster);
-        final SoftAssert anAssert = new SoftAssert();
-        HiveAssert.assertTableEqual(
-            cluster, clusterHC.getTable("default", "hcatsmoke10546"),
-            cluster2, clusterHC2.getTable("default", "hcatsmoke10548"), anAssert);
-        HiveAssert.assertDbEqual(cluster, clusterHC.getDatabase("default"), cluster2,
-            clusterHC2.getDatabase("default"), anAssert);
-        anAssert.assertAll();
-    }
-
-    /**
-     * Test creates a table on the first cluster using static partitioning. Then it creates the same
-     * table on the second cluster using dynamic partitioning. Finally it checks the equality of
-     * these tables.
-     * @throws SQLException
-     * @throws IOException
-     */
-    @Test
-    public void dynamicPartitionsTest() throws Exception {
-        setUp(RecipeExecLocation.SourceCluster);
-        //create table with static partitions on first cluster
-        createPartitionedTable(connection, false);
-
-        //create table with dynamic partitions on second cluster
-        createPartitionedTable(connection2, true);
-
-        //check that both tables are equal
-        HiveAssert.assertTableEqual(
-            cluster, clusterHC.getTable("hdr_sdb1", "global_store_sales"),
-            cluster2, clusterHC2.getTable("hdr_sdb1", "global_store_sales"), new SoftAssert()
-        ).assertAll();
-    }
-
-    /**
-     * 1 src tbl 1 dst tbl replication. Insert/delete/replace partitions using dynamic partition
-     * queries. The changes should get reflected at the destination.
-     */
-    @Test
-    public void drInsertDropReplaceDynamicPartition() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "dynamicPartitionDR";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        //disable strict mode to use only dynamic partition
-        runSql(connection, "set hive.exec.dynamic.partition.mode=nonstrict");
-
-        runSql(connection,
-            "create table " + tblName + "(comment string) partitioned by (pname string)");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this partition is going to be deleted - should NOT appear after dr', 'DELETE')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this partition is going to be replaced - should NOT appear after dr', 'REPLACE')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this partition will have more data - should appear after dr', 'ADD_DATA')");
-
-        LOGGER.info(tblName + " before bootstrap copying: ");
-        runSql(connection, "select * from " + tblName);
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this partition has been added post bootstrap - should appear after dr', 'NEW_PART')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('more data has been added post bootstrap - should appear after dr', 'ADD_DATA')");
-        runSql(connection,
-            "alter table " + tblName + " drop partition(pname = 'DELETE')");
-        runSql(connection,
-            "alter table " + tblName + " drop partition(pname = 'REPLACE')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this partition has been replaced - should appear after dr', 'REPLACE')");
-
-        LOGGER.info(tblName + " after modifications, before replication: ");
-        runSql(connection, "select * from " + tblName);
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-    }
-
-    /**
-     * 1 src tbl 1 dst tbl replication. Insert/overwrite partitions using dynamic partition
-     * queries. The changes should get reflected at the destination.
-     * @throws Exception
-     */
-    @Test
-    public void drInsertOverwriteDynamicPartition() throws Exception {
-        final RecipeExecLocation recipeExecLocation = RecipeExecLocation.SourceCluster;
-        setUp(recipeExecLocation);
-        final String tblName = "drInsertOverwritePartition";
-        final String hlpTblName = "drInsertOverwritePartitionHelperTbl";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName);
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-
-        //disable strict mode to use only dynamic partition
-        runSql(connection, "set hive.exec.dynamic.partition.mode=nonstrict");
-
-        runSql(connection,
-            "create table " + hlpTblName + "(comment string) partitioned by (pname string)");
-        runSql(connection,
-            "insert into table " + hlpTblName + " partition (pname)"
-                + " values('overwrite data - should appear after dr', 'OVERWRITE_PART')");
-        runSql(connection,
-            "insert into table " + hlpTblName + " partition (pname)"
-            + " values('newdata row2 - should appear after dr', 'NEW_DATA')");
-        runSql(connection,
-            "insert into table " + hlpTblName + " partition (pname)"
-                + " values('newdata row1 - should appear after dr', 'NEW_DATA')");
-
-        runSql(connection,
-            "create table " + tblName + "(comment string) partitioned by (pname string)");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this data should be retained - should appear after dr', 'OLD_PART')");
-        runSql(connection,
-            "insert into table " + tblName + " partition (pname) values"
-                + "('this data should get overwritten - should NOT appear after dr', 'OVERWRITE_PART')");
-
-        LOGGER.info(tblName + " before bootstrap copying: ");
-        runSql(connection, "select * from " + tblName);
-        bootstrapCopy(connection, clusterFS, tblName, connection2, clusterFS2, tblName);
-
-        runSql(connection,
-            "insert overwrite table " + tblName + " partition (pname) "
-                + "select comment, pname from " + hlpTblName + " where comment REGEXP '^overwrite'");
-        runSql(connection,
-            "insert overwrite table " + tblName + " partition (pname) "
-                + "select comment, pname from " + hlpTblName + " where comment REGEXP '^newdata'");
-
-        LOGGER.info(tblName + " after modifications, before replication: ");
-        runSql(connection, "select * from " + tblName);
-
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-
-        InstanceUtil.waitTillInstanceReachState(recipeExecLocation.getRecipeOC(clusterOC, clusterOC2),
-            recipeMerlin.getName(), 1, CoordinatorAction.Status.SUCCEEDED, EntityType.PROCESS);
-
-        HiveAssert.assertTableEqual(cluster, clusterHC.getTable(DB_NAME, tblName),
-            cluster2, clusterHC2.getTable(DB_NAME, tblName), new NotifyingAssert(true)
-        ).assertAll();
-    }
-
-    /**
-     * Run the recipe with different frequencies. Submission should go through.
-     * Check the frequency of the launched Oozie job.
-     */
-    @Test(dataProvider = "frequencyGenerator")
-    public void differentRecipeFrequenciesTest(String frequency) throws Exception {
-        setUp(RecipeExecLocation.SourceCluster);
-        LOGGER.info("Testing with frequency: " + frequency);
-        String tblName = "myTable";
-        recipeMerlin.withSourceDb(DB_NAME).withSourceTable(tblName)
-            .withFrequency(new Frequency(frequency));
-        runSql(connection, "create table " + tblName + "(comment string)");
-        final List<String> command = recipeMerlin.getSubmissionCommand();
-        Assert.assertEquals(Bundle.runFalconCLI(command), 0, "Recipe submission failed.");
-        LOGGER.info("Submission went through.");
-
-        InstanceUtil.waitTillInstanceReachState(clusterOC, recipeMerlin.getName(), 1,
-            CoordinatorAction.Status.RUNNING, EntityType.PROCESS);
-        String filter = "name=FALCON_PROCESS_" + recipeMerlin.getName();
-        List<BundleJob> bundleJobs = OozieUtil.getBundles(clusterOC, filter, 0, 10);
-        List<String> bundleIds = OozieUtil.getBundleIds(bundleJobs);
-        String bundleId = OozieUtil.getMaxId(bundleIds);
-        List<CoordinatorJob> coords = clusterOC.getBundleJobInfo(bundleId).getCoordinators();
-        List<String> cIds = new ArrayList<String>();
-        for (CoordinatorJob coord : coords) {
-            cIds.add(coord.getId());
-        }
-        String coordId = OozieUtil.getMinId(cIds);
-        CoordinatorJob job = clusterOC.getCoordJobInfo(coordId);
-        CoordinatorJob.Timeunit timeUnit = job.getTimeUnit();
-        String freq = job.getFrequency();
-        LOGGER.info("Frequency of running job: " + timeUnit + " " + freq);
-        String unit = timeUnit.name().toLowerCase().replace("_", "");
-        if (frequency.contains("hours")) {
-            unit = "hours";
-            freq = String.valueOf(Integer.parseInt(freq) / 60);
-        }
-        Assert.assertTrue(frequency.contains(unit)
-            && frequency.contains(freq), "Running job has different frequency.");
-    }
-
-    @DataProvider(name = "frequencyGenerator")
-    public Object[][] frequencyGenerator() {
-        return new Object[][]{{"minutes(10)"}, {"minutes(10000)"}, {"hours(5)"}, {"hours(5000)"},
-            {"days(3)"}, {"days(3000)"}, {"months(1)"}, {"months(1000)"}, };
-    }
-
-    @AfterMethod(alwaysRun = true)
-    public void tearDown() throws IOException {
-        try {
-            prism.getProcessHelper().deleteByName(recipeMerlin.getName(), null);
-        } catch (Exception e) {
-            LOGGER.info("Deletion of process: " + recipeMerlin.getName() + " failed with exception: " + e);
-        }
-        removeTestClassEntities();
-        cleanTestsDirs();
-    }
-
-}
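
One detail worth preserving from the deleted differentRecipeFrequenciesTest: Oozie reports an hourly coordinator frequency in minutes, so the test divided by 60 before comparing against the Falcon expression "hours(N)". A standalone sketch of that normalization - the helper name is illustrative, while the Timeunit strings mirror org.apache.oozie.client.CoordinatorJob.Timeunit:

    public class FrequencySketch {

        /** Turns an Oozie (timeUnit, frequency) pair into a Falcon-style expression. */
        static String toFalconExpression(String timeUnit, int frequency) {
            String unit = timeUnit.toLowerCase().replace("_", "");
            // Oozie reports hour-based frequencies as N*60 MINUTEs.
            if (unit.equals("minute") && frequency % 60 == 0) {
                return "hours(" + frequency / 60 + ")";
            }
            return unit + "s(" + frequency + ")";
        }

        public static void main(String[] args) {
            System.out.println(toFalconExpression("MINUTE", 300)); // hours(5)
            System.out.println(toFalconExpression("MINUTE", 10));  // minutes(10)
            System.out.println(toFalconExpression("DAY", 3));      // days(3)
        }
    }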