Posted to commits@falcon.apache.org by ve...@apache.org on 2014/02/16 04:31:22 UTC

[1/5] FALCON-11 Add support for security in Falcon. Contributed by Venkatesh Seetharam

Repository: incubator-falcon
Updated Branches:
  refs/heads/master 2cb42dfff -> 3c51f1053


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedReplicationIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedReplicationIT.java b/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedReplicationIT.java
index dbc6442..219de3f 100644
--- a/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedReplicationIT.java
+++ b/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedReplicationIT.java
@@ -73,7 +73,7 @@ public class TableStorageFeedReplicationIT {
         TestContext.cleanupStore();
 
         Map<String, String> overlay = sourceContext.getUniqueOverlay();
-        String sourceFilePath = sourceContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
+        String sourceFilePath = TestContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
         sourceContext.setCluster(sourceFilePath);
 
         final Cluster sourceCluster = sourceContext.getCluster().getCluster();
@@ -88,7 +88,7 @@ public class TableStorageFeedReplicationIT {
         HiveTestUtils.loadData(sourceMetastoreUrl, SOURCE_DATABASE_NAME, SOURCE_TABLE_NAME, sourcePath,
                 PARTITION_VALUE);
 
-        String targetFilePath = targetContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
+        String targetFilePath = TestContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
         targetContext.setCluster(targetFilePath);
 
         final Cluster targetCluster = targetContext.getCluster().getCluster();
@@ -144,17 +144,17 @@ public class TableStorageFeedReplicationIT {
     public void testTableReplication() throws Exception {
         final String feedName = "customer-table-replicating-feed";
         final Map<String, String> overlay = sourceContext.getUniqueOverlay();
-        String filePath = sourceContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
+        String filePath = TestContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
-        filePath = targetContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
         HCatPartition sourcePartition = HiveTestUtils.getPartition(
                 sourceMetastoreUrl, SOURCE_DATABASE_NAME, SOURCE_TABLE_NAME, "ds", PARTITION_VALUE);
         Assert.assertNotNull(sourcePartition);
 
-        filePath = sourceContext.overlayParametersOverTemplate("/table/customer-table-replicating-feed.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/customer-table-replicating-feed.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submitAndSchedule -type feed -file " + filePath));
 
         // wait until the workflow job completes
@@ -168,7 +168,7 @@ public class TableStorageFeedReplicationIT {
         Assert.assertNotNull(targetPartition);
 
         InstancesResult response = targetContext.getService().path("api/instance/running/feed/" + feedName)
-                .header("Remote-User", "guest")
+                .header("Cookie", targetContext.getAuthenticationToken())
                 .accept(MediaType.APPLICATION_JSON)
                 .get(InstancesResult.class);
         Assert.assertEquals(response.getStatus(), APIResult.Status.SUCCEEDED);
@@ -182,10 +182,10 @@ public class TableStorageFeedReplicationIT {
     public void testTableReplicationWithExistingTargetPartition() throws Exception {
         final String feedName = "customer-table-replicating-feed";
         final Map<String, String> overlay = sourceContext.getUniqueOverlay();
-        String filePath = sourceContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
+        String filePath = TestContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
-        filePath = targetContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
         HCatPartition sourcePartition = HiveTestUtils.getPartition(
@@ -199,7 +199,7 @@ public class TableStorageFeedReplicationIT {
                 targetMetastoreUrl, TARGET_DATABASE_NAME, TARGET_TABLE_NAME, "ds", PARTITION_VALUE);
         Assert.assertNotNull(targetPartition);
 
-        filePath = sourceContext.overlayParametersOverTemplate("/table/customer-table-replicating-feed.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/customer-table-replicating-feed.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submitAndSchedule -type feed -file " + filePath));
 
         // wait until the workflow job completes
@@ -213,7 +213,7 @@ public class TableStorageFeedReplicationIT {
         Assert.assertNotNull(targetPartition);
 
         InstancesResult response = targetContext.getService().path("api/instance/running/feed/" + feedName)
-                .header("Remote-User", "guest")
+                .header("Cookie", targetContext.getAuthenticationToken())
                 .accept(MediaType.APPLICATION_JSON)
                 .get(InstancesResult.class);
         Assert.assertEquals(response.getStatus(), APIResult.Status.SUCCEEDED);
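
A note on the two changes that repeat through this patch, both visible above: overlayParametersOverTemplate is a static helper and is now invoked on TestContext rather than on an instance, and the trusted "Remote-User" header gives way to the hadoop.auth cookie obtained during authentication. A minimal sketch of the new request shape, reusing the getService() and getAuthenticationToken() accessors this patch adds to TestContext (the feed name is illustrative):

    import javax.ws.rs.core.MediaType;

    import org.apache.falcon.resource.InstancesResult;
    import org.apache.falcon.resource.TestContext;

    public class AuthenticatedRequestSketch {
        // Queries running instances of a feed, proving identity with the
        // signed auth cookie instead of a client-asserted Remote-User header.
        static InstancesResult runningInstances(TestContext context, String feedName) {
            return context.getService()
                    .path("api/instance/running/feed/" + feedName)
                    .header("Cookie", context.getAuthenticationToken()) // "hadoop.auth=<token>"
                    .accept(MediaType.APPLICATION_JSON)
                    .get(InstancesResult.class);
        }
    }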

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java b/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java
index 1f4e9e8..51def35 100644
--- a/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java
+++ b/webapp/src/test/java/org/apache/falcon/process/PigProcessIT.java
@@ -58,7 +58,7 @@ public class PigProcessIT {
 
         overlay = context.getUniqueOverlay();
 
-        String filePath = context.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
+        // String filePath = TestContext.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
         context.setCluster(overlay.get("cluster"));
 
         final Cluster cluster = context.getCluster().getCluster();
@@ -88,22 +88,22 @@ public class PigProcessIT {
     public void testSubmitAndSchedulePigProcess() throws Exception {
         overlay.put("cluster", "primary-cluster");
 
-        String filePath = context.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
+        String filePath = TestContext.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
         // context.setCluster(filePath);
 
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Assert.assertEquals(0,
                 TestContext.executeWithURL("entity -submit -type feed -file " + filePath));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
         Assert.assertEquals(0,
                 TestContext.executeWithURL("entity -submit -type feed -file " + filePath));
 
         final String pigProcessName = "pig-" + context.getProcessName();
         overlay.put("processName", pigProcessName);
 
-        filePath = context.overlayParametersOverTemplate(TestContext.PIG_PROCESS_TEMPLATE, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.PIG_PROCESS_TEMPLATE, overlay);
         Assert.assertEquals(0,
                 TestContext.executeWithURL("entity -submitAndSchedule -type process -file " + filePath));
 
@@ -112,7 +112,7 @@ public class PigProcessIT {
         Assert.assertEquals(WorkflowJob.Status.SUCCEEDED, jobInfo.getStatus());
 
         InstancesResult response = context.getService().path("api/instance/running/process/" + pigProcessName)
-                .header("Remote-User", "guest")
+                .header("Cookie", context.getAuthenticationToken())
                 .accept(MediaType.APPLICATION_JSON)
                 .get(InstancesResult.class);
         Assert.assertEquals(APIResult.Status.SUCCEEDED, response.getStatus());
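
The submit flow itself is untouched here; only the receiver of the helper calls changed. Condensed from the call sites above (a usage sketch, not new API; 0 is the CLI success code these assertions check for):

    import java.util.Map;

    import org.apache.falcon.resource.TestContext;
    import org.testng.Assert;

    public class SubmitFlowSketch {
        // Materializes the cluster template with a unique overlay, then
        // submits the resulting file through the CLI entry point.
        static void submitCluster(TestContext context) throws Exception {
            Map<String, String> overlay = context.getUniqueOverlay();
            String filePath = TestContext.overlayParametersOverTemplate(
                    TestContext.CLUSTER_TEMPLATE, overlay);
            Assert.assertEquals(0, TestContext.executeWithURL(
                    "entity -submit -type cluster -file " + filePath));
        }
    }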

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/process/TableStorageProcessIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/process/TableStorageProcessIT.java b/webapp/src/test/java/org/apache/falcon/process/TableStorageProcessIT.java
index 2d539c2..91662d4 100644
--- a/webapp/src/test/java/org/apache/falcon/process/TableStorageProcessIT.java
+++ b/webapp/src/test/java/org/apache/falcon/process/TableStorageProcessIT.java
@@ -69,7 +69,7 @@ public class TableStorageProcessIT {
         TestContext.prepare(CLUSTER_TEMPLATE);
 
         overlay = context.getUniqueOverlay();
-        String filePath = context.overlayParametersOverTemplate(CLUSTER_TEMPLATE, overlay);
+        String filePath = TestContext.overlayParametersOverTemplate(CLUSTER_TEMPLATE, overlay);
         context.setCluster(filePath);
 
         final Cluster cluster = context.getCluster().getCluster();
@@ -117,14 +117,14 @@ public class TableStorageProcessIT {
     private void scheduleFeeds() throws Exception {
         overlay.put("cluster", "primary-cluster");
 
-        String filePath = context.overlayParametersOverTemplate(CLUSTER_TEMPLATE, overlay);
+        String filePath = TestContext.overlayParametersOverTemplate(CLUSTER_TEMPLATE, overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
-        filePath = context.overlayParametersOverTemplate("/table/table-feed-input.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/table-feed-input.xml", overlay);
         Assert.assertEquals(0,
                 TestContext.executeWithURL("entity -submitAndSchedule -type feed -file " + filePath));
 
-        filePath = context.overlayParametersOverTemplate("/table/table-feed-output.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/table-feed-output.xml", overlay);
         Assert.assertEquals(0,
                 TestContext.executeWithURL("entity -submitAndSchedule -type feed -file " + filePath));
     }
@@ -161,7 +161,7 @@ public class TableStorageProcessIT {
         final String pigProcessName = "pig-tables-" + context.getProcessName();
         overlay.put("processName", pigProcessName);
 
-        String filePath = context.overlayParametersOverTemplate("/table/pig-process-tables.xml", overlay);
+        String filePath = TestContext.overlayParametersOverTemplate("/table/pig-process-tables.xml", overlay);
         Assert.assertEquals(0,
                 TestContext.executeWithURL("entity -submitAndSchedule -type process -file " + filePath));
 
@@ -174,7 +174,7 @@ public class TableStorageProcessIT {
         Assert.assertTrue(partition != null);
 
         InstancesResult response = context.getService().path("api/instance/running/process/" + pigProcessName)
-                .header("Remote-User", "guest")
+                .header("Cookie", context.getAuthenticationToken())
                 .accept(MediaType.APPLICATION_JSON)
                 .get(InstancesResult.class);
         Assert.assertEquals(APIResult.Status.SUCCEEDED, response.getStatus());
@@ -188,7 +188,7 @@ public class TableStorageProcessIT {
         final String hiveProcessName = "hive-tables-" + context.getProcessName();
         overlay.put("processName", hiveProcessName);
 
-        String filePath = context.overlayParametersOverTemplate("/table/hive-process-template.xml", overlay);
+        String filePath = TestContext.overlayParametersOverTemplate("/table/hive-process-template.xml", overlay);
         Assert.assertEquals(0,
                 TestContext.executeWithURL("entity -submitAndSchedule -type process -file " + filePath));
 
@@ -201,7 +201,7 @@ public class TableStorageProcessIT {
         Assert.assertTrue(partition != null);
 
         InstancesResult response = context.getService().path("api/instance/running/process/" + hiveProcessName)
-                .header("Remote-User", "guest")
+                .header("Cookie", context.getAuthenticationToken())
                 .accept(MediaType.APPLICATION_JSON)
                 .get(InstancesResult.class);
         Assert.assertEquals(APIResult.Status.SUCCEEDED, response.getStatus());
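
For readers new to these ITs: the overlay mechanism behind overlayParametersOverTemplate is plain token substitution. TestContext declares VAR_PATTERN as ##[A-Za-z0-9_]*## (visible in the TestContext diff below) and replaces each token with the matching overlay value, which is how overlay.put("processName", ...) above flows into the submitted XML. A self-contained sketch of that idea, independent of the helper's actual implementation:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class OverlaySketch {
        // Same token syntax as TestContext.VAR_PATTERN: ##name##.
        private static final Pattern VAR_PATTERN = Pattern.compile("##[A-Za-z0-9_]*##");

        // Replaces each ##key## with overlay.get(key); unknown keys are kept as-is.
        static String overlay(String template, Map<String, String> overlay) {
            Matcher matcher = VAR_PATTERN.matcher(template);
            StringBuffer result = new StringBuffer();
            while (matcher.find()) {
                String token = matcher.group();                       // e.g. "##cluster##"
                String key = token.substring(2, token.length() - 2);  // e.g. "cluster"
                String value = overlay.containsKey(key) ? overlay.get(key) : token;
                matcher.appendReplacement(result, Matcher.quoteReplacement(value));
            }
            matcher.appendTail(result);
            return result.toString();
        }

        public static void main(String[] args) {
            Map<String, String> overlay = new HashMap<String, String>();
            overlay.put("cluster", "primary-cluster");
            // Prints: <cluster name="primary-cluster"/>
            System.out.println(overlay("<cluster name=\"##cluster##\"/>", overlay));
        }
    }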

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java b/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java
index 6dc1e3f..8791b6e 100644
--- a/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java
+++ b/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseyIT.java
@@ -33,6 +33,7 @@ import org.apache.falcon.entity.v0.process.Property;
 import org.apache.falcon.entity.v0.process.Validity;
 import org.apache.falcon.util.BuildProperties;
 import org.apache.falcon.util.DeploymentProperties;
+import org.apache.falcon.util.OozieTestUtils;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsShell;
@@ -41,7 +42,7 @@ import org.apache.oozie.client.BundleJob;
 import org.apache.oozie.client.CoordinatorJob;
 import org.apache.oozie.client.Job;
 import org.apache.oozie.client.Job.Status;
-import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.client.ProxyOozieClient;
 import org.testng.Assert;
 import org.testng.annotations.AfterMethod;
 import org.testng.annotations.BeforeClass;
@@ -74,8 +75,6 @@ import java.util.regex.Pattern;
 @Test(groups = {"exhaustive"})
 public class EntityManagerJerseyIT {
 
-    private static final int ONE_HR = 2 * 24 * 60 * 60 * 1000;
-
     @BeforeClass
     public void prepare() throws Exception {
         TestContext.prepare();
@@ -91,8 +90,9 @@ public class EntityManagerJerseyIT {
     private Entity getDefinition(TestContext context, EntityType type, String name) throws Exception {
         ClientResponse response =
                 context.service.path("api/entities/definition/" + type.name().toLowerCase() + "/" + name)
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).get(ClientResponse.class);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .get(ClientResponse.class);
         return (Entity) type.getUnmarshaller().unmarshal(new StringReader(response.getEntity(String.class)));
     }
 
@@ -111,7 +111,7 @@ public class EntityManagerJerseyIT {
         assertLibs(fs, new Path("/project/falcon/working/libext/FEED/retention"));
         assertLibs(fs, new Path("/project/falcon/working/libext/PROCESS"));
 
-        String tmpFileName = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
+        String tmpFileName = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Feed feed = (Feed) EntityType.FEED.getUnmarshaller().unmarshal(new File(tmpFileName));
         Location location = new Location();
         location.setPath("fsext://global:00/falcon/test/input/${YEAR}/${MONTH}/${DAY}/${HOUR}");
@@ -120,7 +120,7 @@ public class EntityManagerJerseyIT {
         cluster.setLocations(new Locations());
         feed.getClusters().getClusters().get(0).getLocations().getLocations().add(location);
 
-        File tmpFile = context.getTempFile();
+        File tmpFile = TestContext.getTempFile();
         EntityType.FEED.getMarshaller().marshal(feed, tmpFile);
         response = context.submitAndSchedule(tmpFileName, overlay, EntityType.FEED);
         context.assertSuccessful(response);
@@ -131,15 +131,16 @@ public class EntityManagerJerseyIT {
     }
 
     private void update(TestContext context, Entity entity, Date endTime) throws Exception {
-        File tmpFile = context.getTempFile();
+        File tmpFile = TestContext.getTempFile();
         entity.getEntityType().getMarshaller().marshal(entity, tmpFile);
         WebResource resource = context.service.path("api/entities/update/"
                 + entity.getEntityType().name().toLowerCase() + "/" + entity.getName());
         if (endTime != null) {
             resource = resource.queryParam("effective", SchemaHelper.formatDateUTC(endTime));
         }
-        ClientResponse response =
-                resource.header("Remote-User", TestContext.REMOTE_USER).accept(MediaType.TEXT_XML)
+        ClientResponse response = resource
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
                 .post(ClientResponse.class, context.getServletInputStream(tmpFile.getAbsolutePath()));
         context.assertSuccessful(response);
     }
@@ -148,15 +149,15 @@ public class EntityManagerJerseyIT {
     public void testUpdateCheckUser() throws Exception {
         TestContext context = newContext();
         Map<String, String> overlay = context.getUniqueOverlay();
-        String tmpFileName = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
+        String tmpFileName = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
         Process process = (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(new File(tmpFileName));
         updateEndtime(process);
-        File tmpFile = context.getTempFile();
+        File tmpFile = TestContext.getTempFile();
         EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
         context.scheduleProcess(tmpFile.getAbsolutePath(), overlay);
-        context.waitForBundleStart(Status.RUNNING);
+        OozieTestUtils.waitForBundleStart(context, Status.RUNNING);
 
-        List<BundleJob> bundles = context.getBundles();
+        List<BundleJob> bundles = OozieTestUtils.getBundles(context);
         Assert.assertEquals(bundles.size(), 1);
         Assert.assertEquals(bundles.get(0).getUser(), TestContext.REMOTE_USER);
 
@@ -166,7 +167,7 @@ public class EntityManagerJerseyIT {
         feed.getLocations().getLocations().get(0).setPath("/falcon/test/output2/${YEAR}/${MONTH}/${DAY}");
         update(context, feed);
 
-        bundles = context.getBundles();
+        bundles = OozieTestUtils.getBundles(context);
         Assert.assertEquals(bundles.size(), 2);
         Assert.assertEquals(bundles.get(0).getUser(), TestContext.REMOTE_USER);
         Assert.assertEquals(bundles.get(1).getUser(), TestContext.REMOTE_USER);
@@ -183,16 +184,16 @@ public class EntityManagerJerseyIT {
     public void cleanup() throws Exception {
         TestContext testContext = contexts.get();
         if (testContext != null) {
-            testContext.killOozieJobs();
+            OozieTestUtils.killOozieJobs(testContext);
         }
+
         contexts.remove();
     }
 
     public void testOptionalInput() throws Exception {
         TestContext context = newContext();
         Map<String, String> overlay = context.getUniqueOverlay();
-        String tmpFileName = context.
-                overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
+        String tmpFileName = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
         Process process = (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(new File(tmpFileName));
 
         Input in1 = process.getInputs().getInputs().get(0);
@@ -205,44 +206,47 @@ public class EntityManagerJerseyIT {
         in2.setEnd("now(0,0)");
         process.getInputs().getInputs().add(in2);
 
-        File tmpFile = context.getTempFile();
+        File tmpFile = TestContext.getTempFile();
         EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
         context.scheduleProcess(tmpFile.getAbsolutePath(), overlay);
-        context.waitForWorkflowStart(context.processName);
+        OozieTestUtils.waitForWorkflowStart(context, context.processName);
     }
 
     public void testProcessDeleteAndSchedule() throws Exception {
         //Submit process with invalid property so that coord submit fails and bundle goes to failed state
         TestContext context = newContext();
         Map<String, String> overlay = context.getUniqueOverlay();
-        String tmpFileName = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
+        String tmpFileName = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
         Process process = (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(new File(tmpFileName));
         Property prop = new Property();
         prop.setName("newProp");
         prop.setValue("${formatTim()}");
         process.getProperties().getProperties().add(prop);
-        File tmpFile = context.getTempFile();
+        File tmpFile = TestContext.getTempFile();
         EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
         context.scheduleProcess(tmpFile.getAbsolutePath(), overlay);
-        context.waitForBundleStart(Status.FAILED, Status.KILLED);
+        OozieTestUtils.waitForBundleStart(context, Status.FAILED, Status.KILLED);
 
         //Delete and re-submit the process with correct workflow
-        ClientResponse clientRepsonse = context.service.path("api/entities/delete/process/"
-                + context.processName).header(
-                "Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).delete(ClientResponse.class);
-        context.assertSuccessful(clientRepsonse);
+        ClientResponse clientResponse = context.service
+                .path("api/entities/delete/process/" + context.processName)
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .delete(ClientResponse.class);
+        context.assertSuccessful(clientResponse);
+
         process.getWorkflow().setPath("/falcon/test/workflow");
-        tmpFile = context.getTempFile();
+        tmpFile = TestContext.getTempFile();
         EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
-        clientRepsonse = context.service.path("api/entities/submitAndSchedule/process").
-                header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML)
+        clientResponse = context.service.path("api/entities/submitAndSchedule/process")
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .type(MediaType.TEXT_XML)
                 .post(ClientResponse.class, context.getServletInputStream(tmpFile.getAbsolutePath()));
-        context.assertSuccessful(clientRepsonse);
+        context.assertSuccessful(clientResponse);
 
         //Assert that new schedule creates new bundle
-        List<BundleJob> bundles = context.getBundles();
+        List<BundleJob> bundles = OozieTestUtils.getBundles(context);
         Assert.assertEquals(bundles.size(), 2);
     }
 
@@ -251,8 +255,8 @@ public class EntityManagerJerseyIT {
         //schedule a process
         TestContext context = newContext();
         context.scheduleProcess();
-        context.waitForBundleStart(Job.Status.RUNNING);
-        List<BundleJob> bundles = context.getBundles();
+        OozieTestUtils.waitForBundleStart(context, Job.Status.RUNNING);
+        List<BundleJob> bundles = OozieTestUtils.getBundles(context);
         Assert.assertEquals(bundles.size(), 1);
 
         //create new file in user workflow
@@ -263,7 +267,7 @@ public class EntityManagerJerseyIT {
         Process process = (Process) getDefinition(context, EntityType.PROCESS, context.processName);
         updateEndtime(process);
         update(context, process);
-        bundles = context.getBundles();
+        bundles = OozieTestUtils.getBundles(context);
         Assert.assertEquals(bundles.size(), 2);
     }
 
@@ -271,10 +275,10 @@ public class EntityManagerJerseyIT {
     public void testProcessInputUpdate() throws Exception {
         TestContext context = newContext();
         context.scheduleProcess();
-        context.waitForBundleStart(Job.Status.RUNNING);
-        List<BundleJob> bundles = context.getBundles();
+        OozieTestUtils.waitForBundleStart(context, Job.Status.RUNNING);
+        List<BundleJob> bundles = OozieTestUtils.getBundles(context);
         Assert.assertEquals(bundles.size(), 1);
-        OozieClient ozClient = context.getOozieClient();
+        ProxyOozieClient ozClient = OozieTestUtils.getOozieClient(context.getCluster().getCluster());
         String coordId = ozClient.getBundleJobInfo(bundles.get(0).getId()).getCoordinators().get(0).getId();
 
         Process process = (Process) getDefinition(context, EntityType.PROCESS, context.processName);
@@ -298,7 +302,7 @@ public class EntityManagerJerseyIT {
         update(context, process, endTime);
 
         //Assert that update creates new bundle and old coord is running
-        bundles = context.getBundles();
+        bundles = OozieTestUtils.getBundles(context);
         Assert.assertEquals(bundles.size(), 2);
         CoordinatorJob coord = ozClient.getCoordJobInfo(coordId);
         Assert.assertEquals(coord.getStatus(), Status.RUNNING);
@@ -308,7 +312,7 @@ public class EntityManagerJerseyIT {
     public void testProcessEndtimeUpdate() throws Exception {
         TestContext context = newContext();
         context.scheduleProcess();
-        context.waitForBundleStart(Job.Status.RUNNING);
+        OozieTestUtils.waitForBundleStart(context, Job.Status.RUNNING);
 
         Process process = (Process) getDefinition(context, EntityType.PROCESS, context.processName);
 
@@ -316,7 +320,7 @@ public class EntityManagerJerseyIT {
         update(context, process);
 
         //Assert that update does not create new bundle
-        List<BundleJob> bundles = context.getBundles();
+        List<BundleJob> bundles = OozieTestUtils.getBundles(context);
         Assert.assertEquals(bundles.size(), 1);
     }
 
@@ -333,8 +337,9 @@ public class EntityManagerJerseyIT {
 
         response = context.service
                 .path("api/entities/status/feed/" + overlay.get("inputFeedName"))
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).get(ClientResponse.class);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .get(ClientResponse.class);
 
         APIResult result = (APIResult) context.unmarshaller.
                 unmarshal(new StringReader(response.getEntity(String.class)));
@@ -360,9 +365,10 @@ public class EntityManagerJerseyIT {
         String feed1 = "f1" + System.currentTimeMillis();
         response = context.service
                 .path("api/entities/status/feed/" + feed1)
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_PLAIN).get(ClientResponse.class);
-        String status = response.getEntity(String.class);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_PLAIN)
+                .get(ClientResponse.class);
+
         Assert.assertEquals(response.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
     }
 
@@ -371,8 +377,9 @@ public class EntityManagerJerseyIT {
         ClientResponse response;
         response = context.service
                 .path("api/admin/version")
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.APPLICATION_JSON)
+                .get(ClientResponse.class);
         String json = response.getEntity(String.class);
         String buildVersion = BuildProperties.get().getProperty("build.version");
         String deployMode = DeploymentProperties.get().getProperty("deploy.mode");
@@ -391,65 +398,66 @@ public class EntityManagerJerseyIT {
         ServletInputStream stream = context.getServletInputStream(getClass().
                 getResourceAsStream(TestContext.SAMPLE_PROCESS_XML));
 
-        ClientResponse clientRepsonse = context.service
+        ClientResponse clientResponse = context.service
                 .path("api/entities/validate/process")
-                .header("Remote-User", TestContext.REMOTE_USER)
+                .header("Cookie", context.getAuthenticationToken())
                 .accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML)
                 .post(ClientResponse.class, stream);
 
-        context.assertFailure(clientRepsonse);
+        context.assertFailure(clientResponse);
     }
 
     public void testClusterValidate() throws Exception {
         TestContext context = newContext();
-        ClientResponse clientRepsonse;
+        ClientResponse clientResponse;
         Map<String, String> overlay = context.getUniqueOverlay();
 
         InputStream stream = context.getServletInputStream(
-                context.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay));
+                TestContext.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay));
 
-        clientRepsonse = context.service.path("api/entities/validate/cluster")
+        clientResponse = context.service.path("api/entities/validate/cluster")
+                .header("Cookie", context.getAuthenticationToken())
                 .accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML)
-                .header("Remote-User", TestContext.REMOTE_USER)
                 .post(ClientResponse.class, stream);
-        context.assertSuccessful(clientRepsonse);
+        context.assertSuccessful(clientResponse);
     }
 
     public void testClusterSubmitScheduleSuspendResumeDelete() throws Exception {
         TestContext context = newContext();
-        ClientResponse clientRepsonse;
+        ClientResponse clientResponse;
         Map<String, String> overlay = context.getUniqueOverlay();
 
-        clientRepsonse = context.submitToFalcon(TestContext.CLUSTER_TEMPLATE, overlay,
+        clientResponse = context.submitToFalcon(TestContext.CLUSTER_TEMPLATE, overlay,
                 EntityType.CLUSTER);
-        context.assertSuccessful(clientRepsonse);
+        context.assertSuccessful(clientResponse);
 
-        clientRepsonse = context.service
+        clientResponse = context.service
                 .path("api/entities/schedule/cluster/" + context.clusterName)
-                .header("Remote-User", TestContext.REMOTE_USER)
+                .header("Cookie", context.getAuthenticationToken())
                 .accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML)
                 .post(ClientResponse.class);
-        context.assertFailure(clientRepsonse);
+        context.assertFailure(clientResponse);
 
-        clientRepsonse = context.service
+        clientResponse = context.service
                 .path("api/entities/suspend/cluster/" + context.clusterName)
-                .header("Remote-User", TestContext.REMOTE_USER)
+                .header("Cookie", context.getAuthenticationToken())
                 .accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML)
                 .post(ClientResponse.class);
-        context.assertFailure(clientRepsonse);
+        context.assertFailure(clientResponse);
 
-        clientRepsonse = context.service
+        clientResponse = context.service
                 .path("api/entities/resume/cluster/" + context.clusterName)
-                .header("Remote-User", TestContext.REMOTE_USER)
+                .header("Cookie", context.getAuthenticationToken())
                 .accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML)
                 .post(ClientResponse.class);
-        context.assertFailure(clientRepsonse);
+        context.assertFailure(clientResponse);
 
-        clientRepsonse = context.service
+        clientResponse = context.service
                 .path("api/entities/delete/cluster/" + context.clusterName)
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).delete(ClientResponse.class);
-        context.assertSuccessful(clientRepsonse);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .delete(ClientResponse.class);
+        context.assertSuccessful(clientResponse);
     }
 
     public void testSubmit() throws Exception {
@@ -483,8 +491,9 @@ public class EntityManagerJerseyIT {
 
         response = context.service
                 .path("api/entities/definition/feed/" + overlay.get("inputFeedName"))
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).get(ClientResponse.class);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .get(ClientResponse.class);
 
         String feedXML = response.getEntity(String.class);
         try {
@@ -498,28 +507,31 @@ public class EntityManagerJerseyIT {
 
     public void testInvalidGetEntityDefinition() {
         TestContext context = newContext();
-        ClientResponse clientRepsonse = context.service
+        ClientResponse clientResponse = context.service
                 .path("api/entities/definition/process/sample1")
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).get(ClientResponse.class);
-        context.assertFailure(clientRepsonse);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .get(ClientResponse.class);
+        context.assertFailure(clientResponse);
     }
 
     public void testScheduleSuspendResume() throws Exception {
         TestContext context = newContext();
         context.scheduleProcess();
 
-        ClientResponse clientRepsonse = context.service
+        ClientResponse clientResponse = context.service
                 .path("api/entities/suspend/process/" + context.processName)
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).post(ClientResponse.class);
-        context.assertSuccessful(clientRepsonse);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .post(ClientResponse.class);
+        context.assertSuccessful(clientResponse);
 
-        clientRepsonse = context.service
+        clientResponse = context.service
                 .path("api/entities/resume/process/" + context.processName)
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).post(ClientResponse.class);
-        context.assertSuccessful(clientRepsonse);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .post(ClientResponse.class);
+        context.assertSuccessful(clientResponse);
     }
 
     public void testFeedSchedule() throws Exception {
@@ -534,12 +546,12 @@ public class EntityManagerJerseyIT {
         context.assertSuccessful(response);
 
         createTestData(context);
-        ClientResponse clientRepsonse = context.service
+        ClientResponse clientResponse = context.service
                 .path("api/entities/schedule/feed/" + overlay.get("inputFeedName"))
-                .header("Remote-User", TestContext.REMOTE_USER)
+                .header("Cookie", context.getAuthenticationToken())
                 .accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML)
                 .post(ClientResponse.class);
-        context.assertSuccessful(clientRepsonse);
+        context.assertSuccessful(clientResponse);
     }
 
     static List<Path> createTestData(TestContext context) throws Exception {
@@ -605,8 +617,9 @@ public class EntityManagerJerseyIT {
 
         response = context.service
                 .path("api/entities/delete/feed/" + overlay.get("inputFeedName"))
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).delete(ClientResponse.class);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .delete(ClientResponse.class);
         context.assertSuccessful(response);
     }
 
@@ -623,8 +636,9 @@ public class EntityManagerJerseyIT {
 
         response = context.service
                 .path("api/entities/delete/cluster/" + context.clusterName)
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).delete(ClientResponse.class);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .delete(ClientResponse.class);
         context.assertFailure(response);
 
         response = context.submitToFalcon(TestContext.FEED_TEMPLATE2, overlay, EntityType.FEED);
@@ -636,32 +650,35 @@ public class EntityManagerJerseyIT {
         //Delete a referred feed
         response = context.service
                 .path("api/entities/delete/feed/" + overlay.get("inputFeedName"))
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).delete(ClientResponse.class);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .delete(ClientResponse.class);
         context.assertFailure(response);
 
         //Delete a submitted process
         response = context.service
                 .path("api/entities/delete/process/" + context.processName)
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).delete(ClientResponse.class);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .delete(ClientResponse.class);
         context.assertSuccessful(response);
 
         response = context.submitToFalcon(TestContext.PROCESS_TEMPLATE, overlay, EntityType.PROCESS);
         context.assertSuccessful(response);
 
-        ClientResponse clientRepsonse = context.service
+        ClientResponse clientResponse = context.service
                 .path("api/entities/schedule/process/" + context.processName)
-                .header("Remote-User", TestContext.REMOTE_USER)
+                .header("Cookie", context.getAuthenticationToken())
                 .accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML)
                 .post(ClientResponse.class);
-        context.assertSuccessful(clientRepsonse);
+        context.assertSuccessful(clientResponse);
 
         //Delete a scheduled process
         response = context.service
                 .path("api/entities/delete/process/" + context.processName)
-                .header("Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).delete(ClientResponse.class);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .delete(ClientResponse.class);
         context.assertSuccessful(response);
     }
 
@@ -671,8 +688,10 @@ public class EntityManagerJerseyIT {
         ClientResponse response;
         response = context.service
                 .path("api/entities/list/process/")
-                .header("Remote-User", TestContext.REMOTE_USER).type(MediaType.TEXT_XML)
-                .accept(MediaType.TEXT_XML).get(ClientResponse.class);
+                .header("Cookie", context.getAuthenticationToken())
+                .type(MediaType.TEXT_XML)
+                .accept(MediaType.TEXT_XML)
+                .get(ClientResponse.class);
         Assert.assertEquals(response.getStatus(), 200);
 
         Map<String, String> overlay = context.getUniqueOverlay();
@@ -682,7 +701,7 @@ public class EntityManagerJerseyIT {
 
         response = context.service
                 .path("api/entities/list/cluster/")
-                .header("Remote-User", TestContext.REMOTE_USER)
+                .header("Cookie", context.getAuthenticationToken())
                 .type(MediaType.TEXT_XML)
                 .accept(MediaType.TEXT_XML)
                 .get(ClientResponse.class);
@@ -696,7 +715,7 @@ public class EntityManagerJerseyIT {
         response = context.service
                 .path("api/entities/list/cluster/")
                 .queryParam("fields", "status")
-                .header("Remote-User", TestContext.REMOTE_USER)
+                .header("Cookie", context.getAuthenticationToken())
                 .type(MediaType.APPLICATION_JSON)
                 .accept(MediaType.APPLICATION_JSON)
                 .get(ClientResponse.class);
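
A second refactoring runs through this file: the Oozie helpers (waitForBundleStart, waitForWorkflowStart, getBundles, killOozieJobs, getOozieClient) move off TestContext into the new OozieTestUtils and take the context as an argument. Their shape, inferred from the call sites above (a usage sketch, not the utility's implementation):

    import java.util.List;

    import org.apache.falcon.resource.TestContext;
    import org.apache.falcon.util.OozieTestUtils;
    import org.apache.oozie.client.BundleJob;
    import org.apache.oozie.client.Job;
    import org.testng.Assert;

    public class OozieHelperSketch {
        // Schedules a process, waits for its bundle to start, verifies the
        // bundle count, and finally kills the jobs as cleanup() does above.
        static void scheduleAndVerify(TestContext context) throws Exception {
            context.scheduleProcess();
            OozieTestUtils.waitForBundleStart(context, Job.Status.RUNNING);
            List<BundleJob> bundles = OozieTestUtils.getBundles(context);
            Assert.assertEquals(bundles.size(), 1);
            OozieTestUtils.killOozieJobs(context);
        }
    }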

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseySmokeIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseySmokeIT.java b/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseySmokeIT.java
index b96aa48..082f541 100644
--- a/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseySmokeIT.java
+++ b/webapp/src/test/java/org/apache/falcon/resource/EntityManagerJerseySmokeIT.java
@@ -21,6 +21,7 @@ import com.sun.jersey.api.client.ClientResponse;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.entity.v0.process.Property;
+import org.apache.falcon.util.OozieTestUtils;
 import org.apache.oozie.client.BundleJob;
 import org.apache.oozie.client.Job.Status;
 import org.testng.Assert;
@@ -57,7 +58,7 @@ public class EntityManagerJerseySmokeIT {
     public void cleanup() throws Exception {
         TestContext testContext = contexts.get();
         if (testContext != null) {
-            testContext.killOozieJobs();
+            OozieTestUtils.killOozieJobs(testContext);
         }
         contexts.remove();
     }
@@ -66,34 +67,37 @@ public class EntityManagerJerseySmokeIT {
         //Submit process with invalid property so that coord submit fails and bundle goes to failed state
         TestContext context = newContext();
         Map<String, String> overlay = context.getUniqueOverlay();
-        String tmpFileName = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
+        String tmpFileName = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
         Process process = (Process) EntityType.PROCESS.getUnmarshaller().unmarshal(new File(tmpFileName));
         Property prop = new Property();
         prop.setName("newProp");
         prop.setValue("${formatTim()}");
         process.getProperties().getProperties().add(prop);
-        File tmpFile = context.getTempFile();
+        File tmpFile = TestContext.getTempFile();
         EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
         context.scheduleProcess(tmpFile.getAbsolutePath(), overlay);
-        context.waitForBundleStart(Status.FAILED);
+        OozieTestUtils.waitForBundleStart(context, Status.FAILED);
 
         //Delete and re-submit the process with correct workflow
-        ClientResponse clientRepsonse = context.service.path("api/entities/delete/process/"
-                + context.processName).header(
-                "Remote-User", TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).delete(ClientResponse.class);
-        context.assertSuccessful(clientRepsonse);
+        ClientResponse clientResponse = context.service
+                .path("api/entities/delete/process/" + context.processName)
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .delete(ClientResponse.class);
+        context.assertSuccessful(clientResponse);
+
         process.getWorkflow().setPath("/falcon/test/workflow");
-        tmpFile = context.getTempFile();
+        tmpFile = TestContext.getTempFile();
         EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
-        clientRepsonse = context.service.path("api/entities/submitAndSchedule/process").
-                header("Remote-User", TestContext.REMOTE_USER)
+
+        clientResponse = context.service.path("api/entities/submitAndSchedule/process")
+                .header("Cookie", context.getAuthenticationToken())
                 .accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML)
                 .post(ClientResponse.class, context.getServletInputStream(tmpFile.getAbsolutePath()));
-        context.assertSuccessful(clientRepsonse);
+        context.assertSuccessful(clientResponse);
 
         //Assert that new schedule creates new bundle
-        List<BundleJob> bundles = context.getBundles();
+        List<BundleJob> bundles = OozieTestUtils.getBundles(context);
         Assert.assertEquals(bundles.size(), 2);
     }
 
@@ -102,7 +106,7 @@ public class EntityManagerJerseySmokeIT {
         ClientResponse response;
         Map<String, String> overlay = context.getUniqueOverlay();
 
-        response = context.submitToFalcon(context.CLUSTER_TEMPLATE, overlay, EntityType.CLUSTER);
+        response = context.submitToFalcon(TestContext.CLUSTER_TEMPLATE, overlay, EntityType.CLUSTER);
         context.assertSuccessful(response);
 
         response = context.submitToFalcon(TestContext.FEED_TEMPLATE1, overlay, EntityType.FEED);
@@ -111,7 +115,7 @@ public class EntityManagerJerseySmokeIT {
         EntityManagerJerseyIT.createTestData(context);
         ClientResponse clientRepsonse = context.service
                 .path("api/entities/schedule/feed/" + overlay.get("inputFeedName"))
-                .header("Remote-User", TestContext.REMOTE_USER)
+                .header("Cookie", context.getAuthenticationToken())
                 .accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML)
                 .post(ClientResponse.class);
         context.assertSuccessful(clientRepsonse);
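
The delete-and-resubmit test above relies on a JAXB round trip: unmarshal the process template, mutate it in memory, marshal it to a temp file, and stream that file as the request body. Condensed from the hunks above (a sketch; the workflow path is the one the test uses):

    import java.io.File;

    import org.apache.falcon.entity.v0.EntityType;
    import org.apache.falcon.entity.v0.process.Process;
    import org.apache.falcon.resource.TestContext;

    public class EntityRoundTripSketch {
        // Loads a process definition, points it at the corrected workflow
        // path, and writes it back out for submission via the API.
        static File rewriteWorkflowPath(String tmpFileName) throws Exception {
            Process process = (Process) EntityType.PROCESS.getUnmarshaller()
                    .unmarshal(new File(tmpFileName));
            process.getWorkflow().setPath("/falcon/test/workflow");
            File tmpFile = TestContext.getTempFile();
            EntityType.PROCESS.getMarshaller().marshal(process, tmpFile);
            return tmpFile;
        }
    }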

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/resource/ProcessInstanceManagerIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/resource/ProcessInstanceManagerIT.java b/webapp/src/test/java/org/apache/falcon/resource/ProcessInstanceManagerIT.java
index c0785ba..03a19cd 100644
--- a/webapp/src/test/java/org/apache/falcon/resource/ProcessInstanceManagerIT.java
+++ b/webapp/src/test/java/org/apache/falcon/resource/ProcessInstanceManagerIT.java
@@ -26,8 +26,9 @@ import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.resource.InstancesResult.Instance;
 import org.apache.falcon.resource.InstancesResult.WorkflowStatus;
+import org.apache.falcon.util.OozieTestUtils;
 import org.apache.falcon.workflow.engine.OozieClientFactory;
-import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.client.ProxyOozieClient;
 import org.apache.oozie.client.WorkflowJob;
 import org.testng.Assert;
 import org.testng.annotations.Test;
@@ -43,14 +44,16 @@ public class ProcessInstanceManagerIT {
 
     protected void schedule(TestContext context) throws Exception {
         context.scheduleProcess();
-        context.waitForProcessWFtoStart();
+        OozieTestUtils.waitForProcessWFtoStart(context);
     }
 
     public void testGetRunningInstances() throws Exception {
         TestContext context = new TestContext();
         schedule(context);
         InstancesResult response = context.service.path("api/instance/running/process/" + context.processName)
-                .header("Remote-User", "guest").accept(MediaType.APPLICATION_JSON).get(InstancesResult.class);
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.APPLICATION_JSON)
+                .get(InstancesResult.class);
         Assert.assertEquals(APIResult.Status.SUCCEEDED, response.getStatus());
         Assert.assertNotNull(response.getInstances());
         Assert.assertEquals(1, response.getInstances().length);
@@ -68,7 +71,9 @@ public class ProcessInstanceManagerIT {
         TestContext context = new TestContext();
         schedule(context);
         InstancesResult response = context.service.path("api/instance/status/process/" + context.processName)
-                .queryParam("start", START_INSTANCE).header("Remote-User", "guest").accept(MediaType.APPLICATION_JSON)
+                .queryParam("start", START_INSTANCE)
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.APPLICATION_JSON)
                 .get(InstancesResult.class);
         Assert.assertEquals(APIResult.Status.SUCCEEDED, response.getStatus());
         Assert.assertNotNull(response.getInstances());
@@ -80,7 +85,9 @@ public class ProcessInstanceManagerIT {
         testKillInstances();
         TestContext context = new TestContext();
         InstancesResult response = context.service.path("api/instance/rerun/process/" + context.processName)
-                .queryParam("start", START_INSTANCE).header("Remote-User", "guest").accept(MediaType.APPLICATION_JSON)
+                .queryParam("start", START_INSTANCE)
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.APPLICATION_JSON)
                 .post(InstancesResult.class);
 
         Assert.assertEquals(APIResult.Status.SUCCEEDED, response.getStatus());
@@ -95,7 +102,9 @@ public class ProcessInstanceManagerIT {
         TestContext context = new TestContext();
         schedule(context);
         InstancesResult response = context.service.path("api/instance/kill/process/" + context.processName)
-                .queryParam("start", START_INSTANCE).header("Remote-User", "guest").accept(MediaType.APPLICATION_JSON)
+                .queryParam("start", START_INSTANCE)
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.APPLICATION_JSON)
                 .post(InstancesResult.class);
         Assert.assertEquals(APIResult.Status.SUCCEEDED, response.getStatus());
         Assert.assertNotNull(response.getInstances());
@@ -109,7 +118,9 @@ public class ProcessInstanceManagerIT {
         TestContext context = new TestContext();
         schedule(context);
         InstancesResult response = context.service.path("api/instance/suspend/process/" + context.processName)
-                .queryParam("start", START_INSTANCE).header("Remote-User", "guest").accept(MediaType.APPLICATION_JSON)
+                .queryParam("start", START_INSTANCE)
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.APPLICATION_JSON)
                 .post(InstancesResult.class);
         Assert.assertEquals(APIResult.Status.SUCCEEDED, response.getStatus());
         Assert.assertNotNull(response.getInstances());
@@ -124,7 +135,9 @@ public class ProcessInstanceManagerIT {
 
         TestContext context = new TestContext();
         InstancesResult response = context.service.path("api/instance/resume/process/" + context.processName)
-                .queryParam("start", START_INSTANCE).header("Remote-User", "guest").accept(MediaType.APPLICATION_JSON)
+                .queryParam("start", START_INSTANCE)
+                .header("Cookie", context.getAuthenticationToken())
+                .accept(MediaType.APPLICATION_JSON)
                 .post(InstancesResult.class);
         Assert.assertEquals(APIResult.Status.SUCCEEDED, response.getStatus());
         Assert.assertNotNull(response.getInstances());
@@ -137,7 +150,7 @@ public class ProcessInstanceManagerIT {
     private void waitForWorkflow(String instance, WorkflowJob.Status status) throws Exception {
         TestContext context = new TestContext();
         ExternalId extId = new ExternalId(context.processName, Tag.DEFAULT, EntityUtil.parseDateUTC(instance));
-        OozieClient ozClient = OozieClientFactory.get(
+        ProxyOozieClient ozClient = OozieClientFactory.get(
                 (Cluster) ConfigurationStore.get().get(EntityType.CLUSTER, context.clusterName));
         String jobId = ozClient.getJobId(extId.getId());
         WorkflowJob jobInfo = null;
@@ -149,6 +162,8 @@ public class ProcessInstanceManagerIT {
             System.out.println("Waiting for workflow job " + jobId + " status " + status);
             Thread.sleep((i + 1) * 1000);
         }
+
+        Assert.assertNotNull(jobInfo);
         Assert.assertEquals(status, jobInfo.getStatus());
     }
 }
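
The last hunk above hardens waitForWorkflow: jobInfo starts out null, so a job that never reaches the desired status could previously die with a NullPointerException on getStatus(); the added assertNotNull turns that into a readable assertion failure. The guarded poll, condensed (the loop bound is illustrative; getJobInfo is the standard Oozie client call):

    import org.apache.oozie.client.ProxyOozieClient;
    import org.apache.oozie.client.WorkflowJob;
    import org.testng.Assert;

    public class GuardedPollSketch {
        // Polls the workflow job with a growing backoff until it reaches the
        // desired status, then asserts rather than risking an NPE.
        static void waitForStatus(ProxyOozieClient ozClient, String jobId,
                                  WorkflowJob.Status status) throws Exception {
            WorkflowJob jobInfo = null;
            for (int i = 0; i < 15; i++) {
                jobInfo = ozClient.getJobInfo(jobId);
                if (jobInfo != null && jobInfo.getStatus() == status) {
                    break;
                }
                Thread.sleep((i + 1) * 1000);
            }
            Assert.assertNotNull(jobInfo);
            Assert.assertEquals(status, jobInfo.getStatus());
        }
    }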

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/resource/TestContext.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/resource/TestContext.java b/webapp/src/test/java/org/apache/falcon/resource/TestContext.java
index 9e10956..b7a2256 100644
--- a/webapp/src/test/java/org/apache/falcon/resource/TestContext.java
+++ b/webapp/src/test/java/org/apache/falcon/resource/TestContext.java
@@ -26,6 +26,8 @@ import com.sun.jersey.api.client.config.DefaultClientConfig;
 import org.apache.commons.lang.RandomStringUtils;
 import org.apache.falcon.FalconException;
 import org.apache.falcon.cli.FalconCLI;
+import org.apache.falcon.client.FalconCLIException;
+import org.apache.falcon.client.FalconClient;
 import org.apache.falcon.cluster.util.EmbeddedCluster;
 import org.apache.falcon.entity.store.ConfigurationStore;
 import org.apache.falcon.entity.v0.EntityType;
@@ -34,17 +36,11 @@ import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.security.CurrentUser;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.falcon.workflow.engine.OozieClientFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.log4j.Logger;
-import org.apache.oozie.client.BundleJob;
-import org.apache.oozie.client.CoordinatorJob;
-import org.apache.oozie.client.Job;
-import org.apache.oozie.client.Job.Status;
-import org.apache.oozie.client.OozieClient;
-import org.apache.oozie.client.WorkflowJob;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.testng.Assert;
 
 import javax.servlet.ServletInputStream;
@@ -65,14 +61,9 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.StringReader;
-import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -80,8 +71,6 @@ import java.util.regex.Pattern;
  * Base test class for CLI, Entity and Process Instances.
  */
 public class TestContext {
-    private static final Logger LOG = Logger.getLogger(TestContext.class);
-
     public static final String FEED_TEMPLATE1 = "/feed-template1.xml";
     public static final String FEED_TEMPLATE2 = "/feed-template2.xml";
 
@@ -92,25 +81,63 @@ public class TestContext {
     public static final String PIG_PROCESS_TEMPLATE = "/pig-process-template.xml";
 
     public static final String BASE_URL = "http://localhost:41000/falcon-webapp";
-    public static final String REMOTE_USER = System.getProperty("user.name");
+    public static final String REMOTE_USER = FalconClient.USER;
+
+    private static final String AUTH_COOKIE_EQ = AuthenticatedURL.AUTH_COOKIE + "=";
 
     protected Unmarshaller unmarshaller;
     protected Marshaller marshaller;
 
     protected EmbeddedCluster cluster;
     protected WebResource service = null;
+    private AuthenticatedURL.Token authenticationToken;
+
     protected String clusterName;
     protected String processName;
     protected String outputFeedName;
 
     public static final Pattern VAR_PATTERN = Pattern.compile("##[A-Za-z0-9_]*##");
 
-    public Unmarshaller getUnmarshaller() {
-        return unmarshaller;
+    public TestContext() {
+        try {
+            JAXBContext jaxbContext = JAXBContext.newInstance(APIResult.class, Feed.class, Process.class, Cluster.class,
+                    InstancesResult.class);
+            unmarshaller = jaxbContext.createUnmarshaller();
+            marshaller = jaxbContext.createMarshaller();
+            configure();
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public void configure() throws Exception {
+        StartupProperties.get().setProperty(
+                "application.services",
+                StartupProperties.get().getProperty("application.services")
+                        .replace("org.apache.falcon.service.ProcessSubscriberService", ""));
+        String store = StartupProperties.get().getProperty("config.store.uri");
+        StartupProperties.get().setProperty("config.store.uri", store + System.currentTimeMillis());
+
+        try {
+            String baseUrl = BASE_URL;
+            if (!baseUrl.endsWith("/")) {
+                baseUrl += "/";
+            }
+            this.authenticationToken = FalconClient.getToken(baseUrl);
+        } catch (FalconCLIException e) {
+            throw new AuthenticationException(e);
+        }
+
+        ClientConfig config = new DefaultClientConfig();
+        Client client = Client.create(config);
+        client.setReadTimeout(500000);
+        client.setConnectTimeout(500000);
+        this.service = client.resource(UriBuilder.fromUri(BASE_URL).build());
     }
 
-    public Marshaller getMarshaller() {
-        return marshaller;
+    public void setCluster(String cName) throws Exception {
+        cluster = EmbeddedCluster.newCluster(cName, true);
+        this.clusterName = cluster.getCluster().getName();
     }
 
     public EmbeddedCluster getCluster() {
@@ -121,6 +148,10 @@ public class TestContext {
         return service;
     }
 
+    public String getAuthenticationToken() {
+        return AUTH_COOKIE_EQ + authenticationToken;
+    }
+
     public String getClusterName() {
         return clusterName;
     }
@@ -129,10 +160,6 @@ public class TestContext {
         return processName;
     }
 
-    public String getOutputFeedName() {
-        return outputFeedName;
-    }
-
     public String getClusterFileTemplate() {
         return CLUSTER_TEMPLATE;
     }
@@ -149,9 +176,12 @@ public class TestContext {
 
         response = submitToFalcon(processTemplate, overlay, EntityType.PROCESS);
         assertSuccessful(response);
-        ClientResponse clientRepsonse = this.service.path("api/entities/schedule/process/" + processName)
-                .header("Remote-User", REMOTE_USER).accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML).post(
-                        ClientResponse.class);
+
+        ClientResponse clientResponse = this.service
+                .path("api/entities/schedule/process/" + processName)
+                .header("Cookie", getAuthenticationToken())
+                .accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML)
+                .post(ClientResponse.class);
-        assertSuccessful(clientRepsonse);
+        assertSuccessful(clientResponse);
     }
 
@@ -159,98 +189,6 @@ public class TestContext {
         scheduleProcess(PROCESS_TEMPLATE, getUniqueOverlay());
     }
 
-    private List<WorkflowJob> getRunningJobs(String entityName) throws Exception {
-        OozieClient ozClient = OozieClientFactory.get(cluster.getCluster());
-        StringBuilder builder = new StringBuilder();
-        builder.append(OozieClient.FILTER_STATUS).append('=').append(Job.Status.RUNNING).append(';');
-        builder.append(OozieClient.FILTER_NAME).append('=').append("FALCON_PROCESS_DEFAULT_").append(entityName);
-        return ozClient.getJobsInfo(builder.toString());
-    }
-
-    public void waitForWorkflowStart(String entityName) throws Exception {
-        for (int i = 0; i < 10; i++) {
-            List<WorkflowJob> jobs = getRunningJobs(entityName);
-            if (jobs != null && !jobs.isEmpty()) {
-                return;
-            }
-
-            System.out.println("Waiting for workflow to start");
-            Thread.sleep(i * 500);
-        }
-        throw new Exception("Workflow for " + entityName + " hasn't started in oozie");
-    }
-
-    public void waitForProcessWFtoStart() throws Exception {
-        waitForWorkflowStart(processName);
-    }
-
-    public void waitForOutputFeedWFtoStart() throws Exception {
-        waitForWorkflowStart(outputFeedName);
-    }
-
-    public void waitForBundleStart(Status... status) throws Exception {
-        OozieClient ozClient = OozieClientFactory.get(cluster.getCluster());
-        List<BundleJob> bundles = getBundles();
-        if (bundles.isEmpty()) {
-            return;
-        }
-
-        Set<Status> statuses = new HashSet<Status>(Arrays.asList(status));
-        String bundleId = bundles.get(0).getId();
-        for (int i = 0; i < 15; i++) {
-            Thread.sleep(i * 1000);
-            BundleJob bundle = ozClient.getBundleJobInfo(bundleId);
-            if (statuses.contains(bundle.getStatus())) {
-                if (statuses.contains(Status.FAILED) || statuses.contains(Status.KILLED)) {
-                    return;
-                }
-
-                boolean done = false;
-                for (CoordinatorJob coord : bundle.getCoordinators()) {
-                    if (statuses.contains(coord.getStatus())) {
-                        done = true;
-                    }
-                }
-                if (done) {
-                    return;
-                }
-            }
-            System.out.println("Waiting for bundle " + bundleId + " in " + status + " state");
-        }
-        throw new Exception("Bundle " + bundleId + " is not " + status + " in oozie");
-    }
-
-    public TestContext() {
-        try {
-            JAXBContext jaxbContext = JAXBContext.newInstance(APIResult.class, Feed.class, Process.class, Cluster.class,
-                    InstancesResult.class);
-            unmarshaller = jaxbContext.createUnmarshaller();
-            marshaller = jaxbContext.createMarshaller();
-            configure();
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    public void configure() throws Exception {
-        StartupProperties.get().setProperty(
-                "application.services",
-                StartupProperties.get().getProperty("application.services")
-                        .replace("org.apache.falcon.service.ProcessSubscriberService", ""));
-        String store = StartupProperties.get().getProperty("config.store.uri");
-        StartupProperties.get().setProperty("config.store.uri", store + System.currentTimeMillis());
-        ClientConfig config = new DefaultClientConfig();
-        Client client = Client.create(config);
-        client.setReadTimeout(500000);
-        client.setConnectTimeout(500000);
-        this.service = client.resource(UriBuilder.fromUri(BASE_URL).build());
-    }
-
-    public void setCluster(String cName) throws Exception {
-        cluster = EmbeddedCluster.newCluster(cName, true);
-        this.clusterName = cluster.getCluster().getName();
-    }
-
     /**
      * Converts a InputStream into ServletInputStream.
      *
@@ -278,7 +216,9 @@ public class TestContext {
         ServletInputStream rawlogStream = getServletInputStream(tmpFile);
 
         return this.service.path("api/entities/submitAndSchedule/" + entityType.name().toLowerCase())
-                .header("Remote-User", TestContext.REMOTE_USER).accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML)
+                .header("Cookie", getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .type(MediaType.TEXT_XML)
                 .post(ClientResponse.class, rawlogStream);
     }
 
@@ -300,50 +240,44 @@ public class TestContext {
 
         ServletInputStream rawlogStream = getServletInputStream(tmpFile);
 
-        return this.service.path("api/entities/submit/" + entityType.name().toLowerCase()).header("Remote-User",
-                TestContext.REMOTE_USER)
-                .accept(MediaType.TEXT_XML).type(MediaType.TEXT_XML).post(ClientResponse.class, rawlogStream);
-    }
-
-    public void assertRequestId(ClientResponse clientRepsonse) {
-        String response = clientRepsonse.getEntity(String.class);
-        try {
-            APIResult result = (APIResult) unmarshaller.unmarshal(new StringReader(response));
-            Assert.assertNotNull(result.getRequestId());
-        } catch (JAXBException e) {
-            Assert.fail("Reponse " + response + " is not valid");
-        }
+        return this.service.path("api/entities/submit/" + entityType.name().toLowerCase())
+                .header("Cookie", getAuthenticationToken())
+                .accept(MediaType.TEXT_XML)
+                .type(MediaType.TEXT_XML)
+                .post(ClientResponse.class, rawlogStream);
     }
 
-    public void assertStatus(ClientResponse clientRepsonse, APIResult.Status status) {
-        String response = clientRepsonse.getEntity(String.class);
+    public void assertStatus(ClientResponse clientResponse, APIResult.Status status) {
+        String response = clientResponse.getEntity(String.class);
         try {
             APIResult result = (APIResult) unmarshaller.unmarshal(new StringReader(response));
             Assert.assertEquals(result.getStatus(), status);
         } catch (JAXBException e) {
-            Assert.fail("Reponse " + response + " is not valid");
+            Assert.fail("Response " + response + " is not valid");
         }
     }
 
-    public void assertFailure(ClientResponse clientRepsonse) {
-        Assert.assertEquals(clientRepsonse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
-        assertStatus(clientRepsonse, APIResult.Status.FAILED);
+    public void assertFailure(ClientResponse clientResponse) {
+        Assert.assertEquals(clientResponse.getStatus(), Response.Status.BAD_REQUEST.getStatusCode());
+        assertStatus(clientResponse, APIResult.Status.FAILED);
     }
 
-    public void assertSuccessful(ClientResponse clientRepsonse) {
-        Assert.assertEquals(clientRepsonse.getStatus(), Response.Status.OK.getStatusCode());
-        assertStatus(clientRepsonse, APIResult.Status.SUCCEEDED);
+    public void assertSuccessful(ClientResponse clientResponse) {
+        Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode());
+        assertStatus(clientResponse, APIResult.Status.SUCCEEDED);
     }
 
-    public String overlayParametersOverTemplate(String template, Map<String, String> overlay) throws IOException {
+    public static String overlayParametersOverTemplate(String template,
+                                                       Map<String, String> overlay) throws IOException {
         File tmpFile = getTempFile();
         OutputStream out = new FileOutputStream(tmpFile);
 
         InputStreamReader in;
-        if (getClass().getResourceAsStream(template) == null) {
+        InputStream resourceAsStream = TestContext.class.getResourceAsStream(template);
+        if (resourceAsStream == null) {
             in = new FileReader(template);
         } else {
-            in = new InputStreamReader(getClass().getResourceAsStream(template));
+            in = new InputStreamReader(resourceAsStream);
         }
         BufferedReader reader = new BufferedReader(in);
         String line;
@@ -362,7 +296,7 @@ public class TestContext {
         return tmpFile.getAbsolutePath();
     }
 
-    public File getTempFile() throws IOException {
+    public static File getTempFile() throws IOException {
         File target = new File("webapp/target");
         if (!target.exists()) {
             target = new File("target");
@@ -371,35 +305,6 @@ public class TestContext {
         return File.createTempFile("test", ".xml", target);
     }
 
-    public OozieClient getOozieClient() throws FalconException {
-        return OozieClientFactory.get(cluster.getCluster());
-    }
-
-    public List<BundleJob> getBundles() throws Exception {
-        List<BundleJob> bundles = new ArrayList<BundleJob>();
-        if (clusterName == null) {
-            return bundles;
-        }
-
-        OozieClient ozClient = OozieClientFactory.get(cluster.getCluster());
-        return ozClient.getBundleJobsInfo("name=FALCON_PROCESS_" + processName, 0, 10);
-    }
-
-    public boolean killOozieJobs() throws Exception {
-        if (cluster == null) {
-            return true;
-        }
-
-        OozieClient ozClient = OozieClientFactory.get(cluster.getCluster());
-        List<BundleJob> bundles = getBundles();
-        if (bundles != null) {
-            for (BundleJob bundle : bundles) {
-                ozClient.kill(bundle.getId());
-            }
-        }
-        return false;
-    }
-
     public Map<String, String> getUniqueOverlay() throws FalconException {
         Map<String, String> overlay = new HashMap<String, String>();
         long time = System.currentTimeMillis();
@@ -440,8 +345,7 @@ public class TestContext {
         Map<String, String> overlay = new HashMap<String, String>();
         overlay.put("cluster", RandomStringUtils.randomAlphabetic(5));
         overlay.put("colo", "gs");
-        TestContext context = new TestContext();
-        context.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
+        TestContext.overlayParametersOverTemplate(clusterTemplate, overlay);
         EmbeddedCluster cluster = EmbeddedCluster.newCluster(overlay.get("cluster"), true);
 
         cleanupStore();
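
The net effect of the TestContext rewrite: template materialization no longer needs an instance (overlayParametersOverTemplate and getTempFile are now static), and every Jersey call authenticates with the hadoop.auth cookie obtained in configure() rather than a Remote-User header. A hypothetical caller could look like this; the getService() accessor name and the list endpoint path are assumptions for illustration:

    TestContext context = new TestContext();   // configure() performs the token handshake
    Map<String, String> overlay = context.getUniqueOverlay();
    String clusterXml =
            TestContext.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);

    ClientResponse response = context.getService()               // assumed accessor for 'service'
            .path("api/entities/list/cluster")                   // hypothetical endpoint
            .header("Cookie", context.getAuthenticationToken())  // "hadoop.auth=<token>"
            .accept(MediaType.TEXT_XML)
            .get(ClientResponse.class);
    Assert.assertEquals(response.getStatus(), 200);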

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/security/BasicAuthFilterTest.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/security/BasicAuthFilterTest.java b/webapp/src/test/java/org/apache/falcon/security/BasicAuthFilterTest.java
index 0ff993b..1caf914 100644
--- a/webapp/src/test/java/org/apache/falcon/security/BasicAuthFilterTest.java
+++ b/webapp/src/test/java/org/apache/falcon/security/BasicAuthFilterTest.java
@@ -20,9 +20,11 @@ package org.apache.falcon.security;
 
 import org.apache.falcon.util.StartupProperties;
 import org.mockito.Mock;
+import org.mockito.Mockito;
 import org.mockito.MockitoAnnotations;
 import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
+import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
 import javax.servlet.Filter;
@@ -30,9 +32,9 @@ import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.core.Response;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
-import static org.mockito.Mockito.*;
 
 /**
  * Test for BasicAuthFilter using mock objects.
@@ -56,28 +58,43 @@ public class BasicAuthFilterTest {
         MockitoAnnotations.initMocks(this);
     }
 
+    @BeforeMethod
+    private void initAuthType() {
+        ConcurrentHashMap<String, String> conf = new ConcurrentHashMap<String, String>();
+        conf.put("type", "simple");
+        conf.put("config.prefix.type", "");
+        conf.put("anonymous.allowed", "true");
+        Mockito.when(mockConfig.getInitParameterNames()).thenReturn(conf.keys());
+
+        for (Map.Entry<String, String> entry : conf.entrySet()) {
+            Mockito.when(mockConfig.getInitParameter(entry.getKey())).thenReturn(entry.getValue());
+        }
+
+        Mockito.when(mockRequest.getMethod()).thenReturn("OPTIONS");
+
+        StringBuffer requestUrl = new StringBuffer("http://localhost");
+        Mockito.when(mockRequest.getRequestURL()).thenReturn(requestUrl);
+    }
+
     @Test
     public void testDoFilter() throws Exception {
         Filter filter = new BasicAuthFilter();
         synchronized (StartupProperties.get()) {
-            StartupProperties.get().setProperty("security.enabled", "false");
             filter.init(mockConfig);
         }
 
         CurrentUser.authenticate("nouser");
         Assert.assertEquals(CurrentUser.getUser(), "nouser");
-        when(mockRequest.getHeader("Remote-User")).thenReturn("testuser");
+
+        CurrentUser.authenticate("guest");
+        Mockito.when(mockRequest.getQueryString()).thenReturn("user.name=guest");
         filter.doFilter(mockRequest, mockResponse, mockChain);
         Assert.assertEquals(CurrentUser.getUser(), "guest");
 
-        synchronized (StartupProperties.get()) {
-            StartupProperties.get().remove("security.enabled");
-            filter.init(mockConfig);
-        }
-
         CurrentUser.authenticate("nouser");
         Assert.assertEquals(CurrentUser.getUser(), "nouser");
-        when(mockRequest.getHeader("Remote-User")).thenReturn("testuser");
+        CurrentUser.authenticate("testuser");
+        Mockito.when(mockRequest.getRemoteUser()).thenReturn("testuser");
         filter.doFilter(mockRequest, mockResponse, mockChain);
         Assert.assertEquals(CurrentUser.getUser(), "testuser");
     }
@@ -87,13 +104,14 @@ public class BasicAuthFilterTest {
         Filter filter = new BasicAuthFilter();
 
         synchronized (StartupProperties.get()) {
-            StartupProperties.get().setProperty("security.enabled", "true");
             filter.init(mockConfig);
         }
 
         CurrentUser.authenticate("nouser");
         Assert.assertEquals(CurrentUser.getUser(), "nouser");
-        when(mockRequest.getHeader("Remote-User")).thenReturn("testuser");
+
+        CurrentUser.authenticate("testuser");
+        Mockito.when(mockRequest.getRemoteUser()).thenReturn("testuser");
         filter.doFilter(mockRequest, mockResponse, mockChain);
         Assert.assertEquals(CurrentUser.getUser(), "testuser");
     }
@@ -103,14 +121,52 @@ public class BasicAuthFilterTest {
         Filter filter = new BasicAuthFilter();
 
         synchronized (StartupProperties.get()) {
-            StartupProperties.get().setProperty("security.enabled", "true");
             filter.init(mockConfig);
         }
 
-        HttpServletResponse errorResponse = mock(HttpServletResponse.class);
-        when(mockRequest.getHeader("Remote-User")).thenReturn(null);
-        filter.doFilter(mockRequest, errorResponse, mockChain);
-        verify(errorResponse).sendError(Response.Status.BAD_REQUEST.getStatusCode(),
-                "Remote user header can't be empty");
+        final String userName = System.getProperty("user.name");
+        try {
+            System.setProperty("user.name", "");
+
+            Mockito.when(mockRequest.getMethod()).thenReturn("POST");
+            Mockito.when(mockRequest.getQueryString()).thenReturn("");
+            Mockito.when(mockRequest.getRemoteUser()).thenReturn(null);
+
+            HttpServletResponse errorResponse = Mockito.mock(HttpServletResponse.class);
+            filter.doFilter(mockRequest, errorResponse, mockChain);
+        } finally {
+            System.setProperty("user.name", userName);
+        }
+    }
+
+    @Test
+    public void testDoFilterForClientBackwardsCompatibility() throws Exception {
+        Filter filter = new BasicAuthFilter();
+
+        final String userName = System.getProperty("user.name");
+        final String httpAuthType =
+                StartupProperties.get().getProperty("falcon.http.authentication.type", "simple");
+        try {
+            System.setProperty("user.name", "");
+            StartupProperties.get().setProperty("falcon.http.authentication.type",
+                    "org.apache.falcon.security.RemoteUserInHeaderBasedAuthenticationHandler");
+
+            synchronized (StartupProperties.get()) {
+                filter.init(mockConfig);
+            }
+
+            Mockito.when(mockRequest.getMethod()).thenReturn("POST");
+            Mockito.when(mockRequest.getQueryString()).thenReturn("");
+            Mockito.when(mockRequest.getRemoteUser()).thenReturn(null);
+            Mockito.when(mockRequest.getHeader("Remote-User")).thenReturn("remote-user");
+
+            filter.doFilter(mockRequest, mockResponse, mockChain);
+
+            Assert.assertEquals(CurrentUser.getUser(), "remote-user");
+
+        } finally {
+            System.setProperty("user.name", userName);
+            StartupProperties.get().setProperty("falcon.http.authentication.type", httpAuthType);
+        }
     }
 }
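
A note on the ConcurrentHashMap in initAuthType() above: FilterConfig.getInitParameterNames() returns a java.util.Enumeration, which ConcurrentHashMap.keys() yields directly. The same mock setup can be built from any Map via Collections.enumeration(); a small self-contained sketch, where the helper class and its name are hypothetical:

    import java.util.Collections;
    import java.util.Map;
    import javax.servlet.FilterConfig;
    import org.mockito.Mockito;

    public final class FilterConfigMocks {
        private FilterConfigMocks() {}

        /** Builds a mock FilterConfig backed by the given init parameters. */
        public static FilterConfig mockConfig(Map<String, String> params) {
            FilterConfig config = Mockito.mock(FilterConfig.class);
            // getInitParameterNames() returns an Enumeration, hence the wrapping.
            Mockito.when(config.getInitParameterNames())
                    .thenReturn(Collections.enumeration(params.keySet()));
            for (Map.Entry<String, String> entry : params.entrySet()) {
                Mockito.when(config.getInitParameter(entry.getKey()))
                        .thenReturn(entry.getValue());
            }
            return config;
        }
    }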

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/util/OozieTestUtils.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/util/OozieTestUtils.java b/webapp/src/test/java/org/apache/falcon/util/OozieTestUtils.java
index 3769dde..e430875 100644
--- a/webapp/src/test/java/org/apache/falcon/util/OozieTestUtils.java
+++ b/webapp/src/test/java/org/apache/falcon/util/OozieTestUtils.java
@@ -18,16 +18,25 @@
 
 package org.apache.falcon.util;
 
+import org.apache.falcon.FalconException;
 import org.apache.falcon.entity.ClusterHelper;
 import org.apache.falcon.entity.EntityUtil;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.logging.LogMover;
+import org.apache.falcon.resource.TestContext;
 import org.apache.falcon.workflow.engine.OozieClientFactory;
 import org.apache.hadoop.fs.Path;
-import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.client.BundleJob;
+import org.apache.oozie.client.CoordinatorJob;
+import org.apache.oozie.client.Job;
+import org.apache.oozie.client.ProxyOozieClient;
 import org.apache.oozie.client.WorkflowJob;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 /**
  * Oozie Utility class for integration-tests.
@@ -37,8 +46,99 @@ public final class OozieTestUtils {
     private OozieTestUtils() {
     }
 
+    public static ProxyOozieClient getOozieClient(TestContext context) throws FalconException {
+        return getOozieClient(context.getCluster().getCluster());
+    }
+
+    public static ProxyOozieClient getOozieClient(Cluster cluster) throws FalconException {
+        return OozieClientFactory.get(cluster);
+    }
+
+    public static List<BundleJob> getBundles(TestContext context) throws Exception {
+        List<BundleJob> bundles = new ArrayList<BundleJob>();
+        if (context.getClusterName() == null) {
+            return bundles;
+        }
+
+        ProxyOozieClient ozClient = OozieClientFactory.get(context.getCluster().getCluster());
+        return ozClient.getBundleJobsInfo("name=FALCON_PROCESS_" + context.getProcessName(), 0, 10);
+    }
+
+    public static boolean killOozieJobs(TestContext context) throws Exception {
+        if (context.getCluster() == null) {
+            return true;
+        }
+
+        ProxyOozieClient ozClient = getOozieClient(context);
+        List<BundleJob> bundles = getBundles(context);
+        if (bundles != null) {
+            for (BundleJob bundle : bundles) {
+                ozClient.kill(bundle.getId());
+            }
+        }
+
+        return false;
+    }
+
+    public static void waitForProcessWFtoStart(TestContext context) throws Exception {
+        waitForWorkflowStart(context, context.getProcessName());
+    }
+
+    public static void waitForWorkflowStart(TestContext context, String entityName) throws Exception {
+        for (int i = 0; i < 10; i++) {
+            List<WorkflowJob> jobs = getRunningJobs(context, entityName);
+            if (jobs != null && !jobs.isEmpty()) {
+                return;
+            }
+
+            System.out.println("Waiting for workflow to start");
+            Thread.sleep(i * 1000);
+        }
+
+        throw new Exception("Workflow for " + entityName + " hasn't started in oozie");
+    }
+
+    private static List<WorkflowJob> getRunningJobs(TestContext context, String entityName) throws Exception {
+        ProxyOozieClient ozClient = getOozieClient(context);
+        return ozClient.getJobsInfo(
+                ProxyOozieClient.FILTER_STATUS + '=' + Job.Status.RUNNING + ';'
+                        + ProxyOozieClient.FILTER_NAME + '=' + "FALCON_PROCESS_DEFAULT_" + entityName);
+    }
+
+    public static void waitForBundleStart(TestContext context, Job.Status... status) throws Exception {
+        ProxyOozieClient ozClient = getOozieClient(context);
+        List<BundleJob> bundles = getBundles(context);
+        if (bundles.isEmpty()) {
+            return;
+        }
+
+        Set<Job.Status> statuses = new HashSet<Job.Status>(Arrays.asList(status));
+        String bundleId = bundles.get(0).getId();
+        for (int i = 0; i < 15; i++) {
+            Thread.sleep(i * 1000);
+            BundleJob bundle = ozClient.getBundleJobInfo(bundleId);
+            if (statuses.contains(bundle.getStatus())) {
+                if (statuses.contains(Job.Status.FAILED) || statuses.contains(Job.Status.KILLED)) {
+                    return;
+                }
+
+                boolean done = false;
+                for (CoordinatorJob coord : bundle.getCoordinators()) {
+                    if (statuses.contains(coord.getStatus())) {
+                        done = true;
+                    }
+                }
+                if (done) {
+                    return;
+                }
+            }
+            System.out.println("Waiting for bundle " + bundleId + " in " + statuses + " state");
+        }
+        throw new Exception("Bundle " + bundleId + " is not " + statuses + " in oozie");
+    }
+
     public static WorkflowJob getWorkflowJob(Cluster cluster, String filter) throws Exception {
-        OozieClient ozClient = OozieClientFactory.get(cluster);
+        ProxyOozieClient ozClient = getOozieClient(cluster);
 
         List<WorkflowJob> jobs;
         while (true) {
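
These are the Oozie waiting and cleanup helpers moved out of TestContext (see the diff above) into static utilities keyed off a TestContext. A plausible end-to-end flow in an integration test, using only the method names introduced here:

    // Hypothetical sequencing; the assertions in between are elided.
    TestContext context = new TestContext();
    context.scheduleProcess();                                        // submit + schedule the default process
    OozieTestUtils.waitForBundleStart(context, Job.Status.RUNNING);   // bundle first, then its coordinators
    OozieTestUtils.waitForProcessWFtoStart(context);                  // finally the workflow itself
    OozieTestUtils.killOozieJobs(context);                            // teardown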

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/util/ResourcesReflectionUtilTest.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/util/ResourcesReflectionUtilTest.java b/webapp/src/test/java/org/apache/falcon/util/ResourcesReflectionUtilTest.java
index e54f81c..bd63a07 100644
--- a/webapp/src/test/java/org/apache/falcon/util/ResourcesReflectionUtilTest.java
+++ b/webapp/src/test/java/org/apache/falcon/util/ResourcesReflectionUtilTest.java
@@ -45,7 +45,7 @@ public class ResourcesReflectionUtilTest {
 
         Assert.assertEquals("wf-instance-failed",
                 ResourcesReflectionUtil.getResourceMonitorName("GenericAlert.instrumentFailedInstance"));
-        Assert.assertEquals(new Integer(10),
+        Assert.assertEquals(new Integer(11),
                 ResourcesReflectionUtil.getResourceTimeTakenName("GenericAlert.instrumentFailedInstance"));
         Assert.assertEquals(null,
                 ResourcesReflectionUtil.getResourceTimeTakenName("GenericAlert.alertLateRerunFailed"));

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/validation/ClusterEntityValidationIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/validation/ClusterEntityValidationIT.java b/webapp/src/test/java/org/apache/falcon/validation/ClusterEntityValidationIT.java
index 9299b5b..ab09474 100644
--- a/webapp/src/test/java/org/apache/falcon/validation/ClusterEntityValidationIT.java
+++ b/webapp/src/test/java/org/apache/falcon/validation/ClusterEntityValidationIT.java
@@ -87,7 +87,7 @@ public class ClusterEntityValidationIT {
     public void testClusterEntityWithInvalidInterfaces(Interfacetype interfacetype, String endpoint)
         throws Exception {
         overlay = context.getUniqueOverlay();
-        String filePath = context.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
+        String filePath = TestContext.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
         InputStream stream = new FileInputStream(filePath);
         Cluster cluster = (Cluster) EntityType.CLUSTER.getUnmarshaller().unmarshal(stream);
         Assert.assertNotNull(cluster);
@@ -96,7 +96,7 @@ public class ClusterEntityValidationIT {
         Interface anInterface = ClusterHelper.getInterface(cluster, interfacetype);
         anInterface.setEndpoint(endpoint);
 
-        File tmpFile = context.getTempFile();
+        File tmpFile = TestContext.getTempFile();
         EntityType.CLUSTER.getMarshaller().marshal(cluster, tmpFile);
         ClientResponse response = context.submitFileToFalcon(EntityType.CLUSTER, tmpFile.getAbsolutePath());
         context.assertFailure(response);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/validation/FeedEntityValidationIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/validation/FeedEntityValidationIT.java b/webapp/src/test/java/org/apache/falcon/validation/FeedEntityValidationIT.java
index db24b9a..f6d941c 100644
--- a/webapp/src/test/java/org/apache/falcon/validation/FeedEntityValidationIT.java
+++ b/webapp/src/test/java/org/apache/falcon/validation/FeedEntityValidationIT.java
@@ -97,7 +97,7 @@ public class FeedEntityValidationIT {
         overlay.put("colo", "default"); // validations will be ignored if not default & tests fail
         overlay.put("tableUri", TABLE_URI);
 
-        String filePath = context.overlayParametersOverTemplate("/hive-table-feed.xml", overlay);
+        String filePath = TestContext.overlayParametersOverTemplate("/hive-table-feed.xml", overlay);
         InputStream stream = new FileInputStream(filePath);
         FeedEntityParser parser = (FeedEntityParser) EntityParserFactory.getParser(EntityType.FEED);
         Feed feed = parser.parse(stream);


[5/5] git commit: FALCON-11 Add support for security in Falcon. Contributed by Venkatesh Seetharam

Posted by ve...@apache.org.
FALCON-11 Add support for security in Falcon. Contributed by Venkatesh Seetharam


Project: http://git-wip-us.apache.org/repos/asf/incubator-falcon/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-falcon/commit/3c51f105
Tree: http://git-wip-us.apache.org/repos/asf/incubator-falcon/tree/3c51f105
Diff: http://git-wip-us.apache.org/repos/asf/incubator-falcon/diff/3c51f105

Branch: refs/heads/master
Commit: 3c51f1053a3524557d1e1e58f0a4fc778431e75c
Parents: 2cb42df
Author: Venkatesh Seetharam <ve...@hortonworks.com>
Authored: Sat Feb 15 19:30:01 2014 -0800
Committer: Venkatesh Seetharam <ve...@hortonworks.com>
Committed: Sat Feb 15 19:30:01 2014 -0800

----------------------------------------------------------------------
 CHANGES.txt                                     |   1 +
 client/pom.xml                                  |  24 +
 .../java/org/apache/falcon/cli/FalconCLI.java   |  43 +-
 .../org/apache/falcon/client/FalconClient.java  | 159 +++---
 common/pom.xml                                  |   5 +
 .../falcon/catalog/AbstractCatalogService.java  |  19 +-
 .../falcon/catalog/HiveCatalogService.java      |  88 ++-
 .../falcon/cleanup/AbstractCleanupHandler.java  |  19 +-
 .../falcon/cleanup/FeedCleanupHandler.java      |   2 +-
 .../apache/falcon/entity/CatalogStorage.java    |   6 -
 .../org/apache/falcon/entity/ClusterHelper.java |  24 +-
 .../org/apache/falcon/entity/EntityUtil.java    |   3 +-
 .../apache/falcon/entity/FileSystemStorage.java |   6 -
 .../java/org/apache/falcon/entity/Storage.java  |   9 -
 .../entity/parser/ClusterEntityParser.java      |  50 +-
 .../falcon/entity/parser/FeedEntityParser.java  |  29 +-
 .../entity/parser/ProcessEntityParser.java      |  13 +-
 .../falcon/entity/store/ConfigurationStore.java |  27 +-
 .../falcon/hadoop/HadoopClientFactory.java      | 202 +++++++
 .../AuthenticationInitializationService.java    | 122 ++++
 .../org/apache/falcon/security/CurrentUser.java |   3 +-
 .../falcon/security/FalconLoginModule.java      |  89 ---
 .../security/FalconSecurityConfiguration.java   |  52 --
 .../falcon/security/SecurityConstants.java      |  38 --
 .../apache/falcon/security/SecurityUtil.java    | 102 ++++
 .../org/apache/falcon/update/UpdateHelper.java  |  10 +-
 common/src/main/resources/startup.properties    |  64 ++-
 .../apache/falcon/entity/AbstractTestBase.java  |  13 +-
 .../falcon/entity/FileSystemStorageTest.java    |  12 -
 .../falcon/hadoop/HadoopClientFactoryTest.java  | 101 ++++
 ...AuthenticationInitializationServiceTest.java | 142 +++++
 .../falcon/security/SecurityUtilTest.java       |  69 +++
 docs/src/site/twiki/Security.twiki              | 193 +++++++
 docs/src/site/twiki/index.twiki                 |   3 +-
 docs/src/site/twiki/restapi/AdminConfig.twiki   |   1 -
 docs/src/site/twiki/restapi/AdminStack.twiki    |   1 -
 docs/src/site/twiki/restapi/AdminVersion.twiki  |   1 -
 .../site/twiki/restapi/EntityDefinition.twiki   |   1 -
 docs/src/site/twiki/restapi/EntityDelete.twiki  |   1 -
 .../site/twiki/restapi/EntityDependencies.twiki |   1 -
 docs/src/site/twiki/restapi/EntityList.twiki    |   2 -
 docs/src/site/twiki/restapi/EntityResume.twiki  |   1 -
 .../src/site/twiki/restapi/EntitySchedule.twiki |   1 -
 docs/src/site/twiki/restapi/EntityStatus.twiki  |   1 -
 docs/src/site/twiki/restapi/EntitySubmit.twiki  |   4 +-
 .../twiki/restapi/EntitySubmitAndSchedule.twiki |   1 -
 docs/src/site/twiki/restapi/EntitySuspend.twiki |   1 -
 docs/src/site/twiki/restapi/EntityUpdate.twiki  |   1 -
 .../src/site/twiki/restapi/EntityValidate.twiki |   6 +-
 docs/src/site/twiki/restapi/InstanceKill.twiki  |   1 -
 docs/src/site/twiki/restapi/InstanceLogs.twiki  |   1 -
 docs/src/site/twiki/restapi/InstanceRerun.twiki |   1 -
 .../src/site/twiki/restapi/InstanceResume.twiki |   1 -
 .../site/twiki/restapi/InstanceRunning.twiki    |   1 -
 .../src/site/twiki/restapi/InstanceStatus.twiki |   1 -
 .../site/twiki/restapi/InstanceSuspend.twiki    |   1 -
 docs/src/site/twiki/restapi/ResourceList.twiki  |  24 +
 .../falcon/converter/OozieFeedMapper.java       |  16 +-
 .../config/workflow/replication-workflow.xml    |   4 +
 .../config/workflow/retention-workflow.xml      |   6 +-
 .../falcon/converter/OozieFeedMapperTest.java   |   3 +
 hadoop-webapp/pom.xml                           |   5 -
 html5-ui/js/falcon.js                           |  13 +-
 messaging/pom.xml                               |   5 +
 .../falcon/messaging/EntityInstanceMessage.java |   3 +-
 .../messaging/EntityInstanceMessageCreator.java |  10 +-
 .../falcon/messaging/MessageProducer.java       |  13 +-
 .../messaging/FalconTopicProducerTest.java      |   6 +-
 .../falcon/messaging/FeedProducerTest.java      |   3 +
 .../falcon/messaging/ProcessProducerTest.java   |   5 +-
 .../org/apache/falcon/aspect/GenericAlert.java  |   4 +
 .../converter/AbstractOozieEntityMapper.java    |  40 +-
 .../org/apache/falcon/logging/LogMover.java     |   2 +-
 .../org/apache/falcon/logging/LogProvider.java  |  20 +-
 .../service/SharedLibraryHostingService.java    |  19 +-
 .../falcon/workflow/FalconPostProcessing.java   |  15 +-
 .../workflow/engine/OozieClientFactory.java     |  24 +-
 .../engine/OozieHouseKeepingService.java        |   3 +-
 .../workflow/engine/OozieWorkflowEngine.java    |  89 ++-
 .../apache/oozie/client/CustomOozieClient.java  | 101 ----
 .../apache/oozie/client/ProxyOozieClient.java   | 562 +++++++++++++++++++
 .../workflow/FalconPostProcessingTest.java      |   5 +
 pom.xml                                         |  17 +-
 .../falcon/resource/channel/HTTPChannel.java    |  24 +-
 .../apache/falcon/security/BasicAuthFilter.java | 186 ++++--
 ...eUserInHeaderBasedAuthenticationHandler.java |  49 ++
 .../falcon/service/FalconTopicSubscriber.java   |  67 +--
 .../service/ProcessSubscriberService.java       |   4 +-
 .../apache/falcon/aspect/GenericAlertTest.java  |   4 +-
 .../service/FalconTopicSubscriberTest.java      |   8 +-
 .../falcon/converter/OozieProcessMapper.java    |  43 +-
 .../config/workflow/process-parent-workflow.xml |   4 +
 .../converter/OozieProcessMapperTest.java       |   3 +
 .../apache/falcon/latedata/LateDataHandler.java |  22 +-
 .../apache/falcon/rerun/event/LaterunEvent.java |   9 +-
 .../apache/falcon/rerun/event/RerunEvent.java   |   8 +-
 .../falcon/rerun/event/RerunEventFactory.java   |   4 +-
 .../apache/falcon/rerun/event/RetryEvent.java   |   6 +-
 .../rerun/handler/AbstractRerunConsumer.java    |   8 +-
 .../rerun/handler/AbstractRerunHandler.java     |   7 +-
 .../falcon/rerun/handler/LateRerunConsumer.java |  26 +-
 .../falcon/rerun/handler/LateRerunHandler.java  |  18 +-
 .../falcon/rerun/handler/RetryConsumer.java     |   6 +-
 .../falcon/rerun/handler/RetryHandler.java      |  15 +-
 .../falcon/rerun/queue/InMemoryQueue.java       |   5 +-
 .../rerun/handler/TestLateRerunHandler.java     |   2 +-
 .../apache/falcon/rerun/queue/ActiveMQTest.java |   2 +-
 .../falcon/rerun/queue/InMemoryQueueTest.java   |   6 +-
 src/bin/falcon                                  |   2 +-
 src/conf/log4j.xml                              |  29 +
 src/conf/startup.properties                     |  68 ++-
 .../falcon/cluster/util/EmbeddedCluster.java    |  10 +-
 webapp/pom.xml                                  |   5 +
 webapp/src/conf/oozie/conf/oozie-site.xml       |  44 +-
 webapp/src/main/resources/log4j.xml             |  29 +
 .../falcon/catalog/HiveCatalogServiceIT.java    |  16 +-
 .../java/org/apache/falcon/cli/FalconCLIIT.java |  72 +--
 .../org/apache/falcon/cli/FalconCLISmokeIT.java |  18 +-
 .../apache/falcon/late/LateDataHandlerIT.java   |   2 +-
 .../lifecycle/FileSystemFeedReplicationIT.java  |  40 +-
 .../lifecycle/TableStorageFeedEvictorIT.java    |   1 +
 .../TableStorageFeedReplicationIT.java          |  20 +-
 .../org/apache/falcon/process/PigProcessIT.java |  12 +-
 .../falcon/process/TableStorageProcessIT.java   |  16 +-
 .../falcon/resource/EntityManagerJerseyIT.java  | 235 ++++----
 .../resource/EntityManagerJerseySmokeIT.java    |  36 +-
 .../resource/ProcessInstanceManagerIT.java      |  33 +-
 .../org/apache/falcon/resource/TestContext.java | 258 +++------
 .../falcon/security/BasicAuthFilterTest.java    |  92 ++-
 .../org/apache/falcon/util/OozieTestUtils.java  | 104 +++-
 .../util/ResourcesReflectionUtilTest.java       |   2 +-
 .../validation/ClusterEntityValidationIT.java   |   4 +-
 .../validation/FeedEntityValidationIT.java      |   2 +-
 133 files changed, 3123 insertions(+), 1353 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index 0462902..ecab28a 100755
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -3,6 +3,7 @@ Apache Falcon (incubating) Change log
 Trunk (Unreleased)
 
   INCOMPATIBLE CHANGES
+    FALCON-11 Add support for security in Falcon (Venkatesh Seetharam)
 
   NEW FEATURES
     FALCON-281 Design Action Interface. (Srikanth Sundarrajan)

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/client/pom.xml
----------------------------------------------------------------------
diff --git a/client/pom.xml b/client/pom.xml
index a43f7f5..63c4cce 100644
--- a/client/pom.xml
+++ b/client/pom.xml
@@ -32,6 +32,30 @@
     <name>Apache Falcon CLI client</name>
     <packaging>jar</packaging>
 
+    <profiles>
+        <profile>
+            <id>hadoop-1</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-core</artifactId>
+                </dependency>
+            </dependencies>
+        </profile>
+        <profile>
+            <id>hadoop-2</id>
+            <dependencies>
+                <dependency>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-auth</artifactId>
+                </dependency>
+            </dependencies>
+        </profile>
+    </profiles>
+
     <dependencies>
         <dependency>
             <groupId>commons-cli</groupId>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/client/src/main/java/org/apache/falcon/cli/FalconCLI.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/cli/FalconCLI.java b/client/src/main/java/org/apache/falcon/cli/FalconCLI.java
index aa712ad..a414e32 100644
--- a/client/src/main/java/org/apache/falcon/cli/FalconCLI.java
+++ b/client/src/main/java/org/apache/falcon/cli/FalconCLI.java
@@ -138,12 +138,18 @@ public class FalconCLI {
             int exitValue = 0;
             if (command.getName().equals(HELP_CMD)) {
                 parser.showHelp();
-            } else if (command.getName().equals(ADMIN_CMD)) {
-                exitValue = adminCommand(command.getCommandLine());
-            } else if (command.getName().equals(ENTITY_CMD)) {
-                entityCommand(command.getCommandLine());
-            } else if (command.getName().equals(INSTANCE_CMD)) {
-                instanceCommand(command.getCommandLine());
+            } else {
+                CommandLine commandLine = command.getCommandLine();
+                String falconUrl = getFalconEndpoint(commandLine);
+                FalconClient client = new FalconClient(falconUrl);
+
+                if (command.getName().equals(ADMIN_CMD)) {
+                    exitValue = adminCommand(commandLine, client, falconUrl);
+                } else if (command.getName().equals(ENTITY_CMD)) {
+                    entityCommand(commandLine, client);
+                } else if (command.getName().equals(INSTANCE_CMD)) {
+                    instanceCommand(commandLine, client);
+                }
             }
 
             return exitValue;
@@ -167,10 +173,8 @@ public class FalconCLI {
         }
     }
 
-    private void instanceCommand(CommandLine commandLine) throws FalconCLIException, IOException {
-        String falconUrl = getFalconEndpoint(commandLine);
-        FalconClient client = new FalconClient(falconUrl);
-
+    private void instanceCommand(CommandLine commandLine, FalconClient client)
+        throws FalconCLIException, IOException {
         Set<String> optionsList = new HashSet<String>();
         for (Option option : commandLine.getOptions()) {
             optionsList.add(option.getOpt());
@@ -257,12 +261,8 @@ public class FalconCLI {
         }
     }
 
-    private void entityCommand(CommandLine commandLine)
+    private void entityCommand(CommandLine commandLine, FalconClient client)
         throws FalconCLIException, IOException {
-
-        String falconUrl = getFalconEndpoint(commandLine);
-        FalconClient client = new FalconClient(falconUrl);
-
         Set<String> optionsList = new HashSet<String>();
         for (Option option : commandLine.getOptions()) {
             optionsList.add(option.getOpt());
@@ -395,9 +395,12 @@ public class FalconCLI {
                 "show the current system status");
         Option version = new Option(VERSION_OPTION, false,
                 "show Falcon server build version");
+        Option stack = new Option(STACK_OPTION, false,
+                "show the thread stack dump");
         Option help = new Option("help", false, "show Falcon help");
         group.addOption(status);
         group.addOption(version);
+        group.addOption(stack);
         group.addOption(help);
 
         adminOptions.addOptionGroup(group);
@@ -587,11 +590,9 @@ public class FalconCLI {
         return url;
     }
 
-    private int adminCommand(CommandLine commandLine) throws FalconCLIException, IOException {
+    private int adminCommand(CommandLine commandLine, FalconClient client,
+                             String falconUrl) throws FalconCLIException, IOException {
         String result;
-        String falconUrl = getFalconEndpoint(commandLine);
-        FalconClient client = new FalconClient(falconUrl);
-
         Set<String> optionsList = new HashSet<String>();
         for (Option option : commandLine.getOptions()) {
             optionsList.add(option.getOpt());
@@ -603,9 +604,8 @@ public class FalconCLI {
         }
         int exitValue = 0;
         if (optionsList.contains(STATUS_OPTION)) {
-            int status = 0;
             try {
-                status = client.getStatus();
+                int status = client.getStatus();
                 if (status != 200) {
                     ERR.get().println("Falcon server is not fully operational (on " + falconUrl + "). "
                             + "Please check log files.");
@@ -623,6 +623,7 @@ public class FalconCLI {
         } else if (optionsList.contains(HELP_CMD)) {
             OUT.get().println("Falcon Help");
         }
+
         return exitValue;
     }
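
A design note on this refactoring: FalconClient now performs the authentication handshake in its constructor (see the client diff below), so the CLI resolves the endpoint and builds the client once per invocation, handing it to each subcommand instead of re-authenticating per command. A hedged usage sketch with a placeholder endpoint:

    FalconClient client = new FalconClient("http://localhost:15000/");  // token fetched in the constructor
    int httpStatus = client.getStatus();  // subsequent calls replay the hadoop.auth cookie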
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/client/src/main/java/org/apache/falcon/client/FalconClient.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/falcon/client/FalconClient.java b/client/src/main/java/org/apache/falcon/client/FalconClient.java
index 367fcc5..a5c31c2 100644
--- a/client/src/main/java/org/apache/falcon/client/FalconClient.java
+++ b/client/src/main/java/org/apache/falcon/client/FalconClient.java
@@ -27,6 +27,9 @@ import org.apache.falcon.entity.v0.SchemaHelper;
 import org.apache.falcon.resource.APIResult;
 import org.apache.falcon.resource.EntityList;
 import org.apache.falcon.resource.InstancesResult;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
+import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
 import org.apache.falcon.resource.InstancesSummaryResult;
 
 import javax.ws.rs.HttpMethod;
@@ -41,6 +44,7 @@ import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.UnsupportedEncodingException;
+import java.net.URL;
 import java.util.Date;
 import java.util.Map;
 import java.util.Properties;
@@ -51,55 +55,83 @@ import java.util.Properties;
  */
 public class FalconClient {
 
-    private final WebResource service;
     public static final String WS_HEADER_PREFIX = "header:";
-    private static final String REMOTE_USER = "Remote-User";
-    private static final String USER = System.getProperty("user.name");
+    public static final String USER = System.getProperty("user.name");
+    public static final String AUTH_URL = "api/options?" + PseudoAuthenticator.USER_NAME + "=" + USER;
+
     private static final String FALCON_INSTANCE_ACTION_CLUSTERS = "falcon.instance.action.clusters";
     private static final String FALCON_INSTANCE_SOURCE_CLUSTERS = "falcon.instance.source.clusters";
 
     /**
+     * Name of the HTTP cookie used for the authentication token between the client and the server.
+     */
+    public static final String AUTH_COOKIE = "hadoop.auth";
+    private static final String AUTH_COOKIE_EQ = AUTH_COOKIE + "=";
+    private static final KerberosAuthenticator AUTHENTICATOR = new KerberosAuthenticator();
+
+    private final WebResource service;
+    private final AuthenticatedURL.Token authenticationToken;
+
+    /**
      * Create a Falcon client instance.
      *
      * @param falconUrl of the server to which client interacts
-     * @throws IOException
+     * @throws FalconCLIException
      */
-    public FalconClient(String falconUrl) throws IOException {
+    public FalconClient(String falconUrl) throws FalconCLIException {
         String baseUrl = notEmpty(falconUrl, "FalconUrl");
         if (!baseUrl.endsWith("/")) {
             baseUrl += "/";
         }
+
         Client client = Client.create(new DefaultClientConfig());
         setFalconTimeOut(client);
         service = client.resource(UriBuilder.fromUri(baseUrl).build());
-        client.resource(UriBuilder.fromUri(baseUrl).build());
 
-        // addHeaders();
+        authenticationToken = getToken(baseUrl);
     }
 
-    private void setFalconTimeOut(Client client) throws IOException {
-        Properties prop = new Properties();
-        int readTimeout;
-        int connectTimeout;
-        InputStream inputStream = null;
+    private void setFalconTimeOut(Client client) throws FalconCLIException {
         try {
-            inputStream = FalconClient.class.getResourceAsStream("/client.properties");
-            if (inputStream != null) {
-                prop.load(inputStream);
-                readTimeout = prop.containsKey("falcon.read.timeout") ? Integer
-                        .parseInt(prop.getProperty("falcon.read.timeout")) : 180000;
-                connectTimeout = prop.containsKey("falcon.connect.timeout") ? Integer
-                        .parseInt(prop.getProperty("falcon.connect.timeout"))
-                        : 180000;
-            } else {
-                readTimeout = 180000;
-                connectTimeout = 180000;
+            Properties prop = new Properties();
+            int readTimeout;
+            int connectTimeout;
+            InputStream inputStream = null;
+            try {
+                inputStream = FalconClient.class.getResourceAsStream("/client.properties");
+                if (inputStream != null) {
+                    prop.load(inputStream);
+                    readTimeout = prop.containsKey("falcon.read.timeout") ? Integer
+                            .parseInt(prop.getProperty("falcon.read.timeout")) : 180000;
+                    connectTimeout = prop.containsKey("falcon.connect.timeout") ? Integer
+                            .parseInt(prop.getProperty("falcon.connect.timeout"))
+                            : 180000;
+                } else {
+                    readTimeout = 180000;
+                    connectTimeout = 180000;
+                }
+            } finally {
+                IOUtils.closeQuietly(inputStream);
             }
-        } finally {
-            IOUtils.closeQuietly(inputStream);
+            client.setConnectTimeout(connectTimeout);
+            client.setReadTimeout(readTimeout);
+        } catch (IOException e) {
+            throw new FalconCLIException("An error occurred while reading client.properties file.", e);
         }
-        client.setConnectTimeout(connectTimeout);
-        client.setReadTimeout(readTimeout);
+    }
+
+    public static AuthenticatedURL.Token getToken(String baseUrl) throws FalconCLIException {
+        AuthenticatedURL.Token currentToken = new AuthenticatedURL.Token();
+        try {
+            URL url = new URL(baseUrl + AUTH_URL);
+            // using KerberosAuthenticator which falls back to PseudoAuthenticator
+            // instead of passing authentication type from the command line - bad factory
+            new AuthenticatedURL(AUTHENTICATOR).openConnection(url, currentToken);
+        } catch (Exception ex) {
+            throw new FalconCLIException("Could not authenticate, " + ex.getMessage(), ex);
+        }
+
+        return currentToken;
     }
 
     /**
@@ -234,10 +266,11 @@ public class FalconClient {
         if (effectiveTime != null) {
             resource = resource.queryParam("time", SchemaHelper.formatDateUTC(effectiveTime));
         }
-        ClientResponse clientResponse = resource.header(REMOTE_USER, USER)
+        ClientResponse clientResponse = resource
+                .header("Cookie", AUTH_COOKIE_EQ + authenticationToken)
                 .accept(operation.mimeType).type(MediaType.TEXT_XML)
                 .method(operation.method, ClientResponse.class, entityStream);
-        checkIfSuccessfull(clientResponse);
+        checkIfSuccessful(clientResponse);
         return parseAPIResult(clientResponse);
     }
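
The Cookie header replayed in these requests comes from a one-time handshake in the constructor: getToken() opens an AuthenticatedURL against the api/options endpoint, where KerberosAuthenticator negotiates SPNEGO when available and falls back to pseudo authentication otherwise. A condensed sketch of that exchange; the base URL is a placeholder:

    // One-time token negotiation via the hadoop-auth client API.
    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
    URL url = new URL("http://localhost:15000/" + FalconClient.AUTH_URL);
    new AuthenticatedURL(new KerberosAuthenticator()).openConnection(url, token);

    // Every later REST call replays it as a cookie:
    //     resource.header("Cookie", "hadoop.auth=" + token)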
 
@@ -253,7 +286,6 @@ public class FalconClient {
         throws FalconCLIException {
 
         return sendEntityRequest(Entities.STATUS, entityType, entityName, colo);
-
     }
 
     public String getDefinition(String entityType, String entityName)
@@ -261,7 +293,6 @@ public class FalconClient {
 
         return sendDefinitionRequest(Entities.DEFINITION, entityType,
                 entityName);
-
     }
 
     public String getDependency(String entityType, String entityName)
@@ -378,8 +409,9 @@ public class FalconClient {
     public int getStatus() throws FalconCLIException {
         AdminOperations job =  AdminOperations.VERSION;
         ClientResponse clientResponse = service.path(job.path)
-                .header(REMOTE_USER, USER).accept(job.mimeType)
-                .type(MediaType.TEXT_PLAIN).method(job.method, ClientResponse.class);
+                .header("Cookie", AUTH_COOKIE_EQ + authenticationToken)
+                .accept(job.mimeType).type(MediaType.TEXT_PLAIN)
+                .method(job.method, ClientResponse.class);
         return clientResponse.getStatus();
     }
 
@@ -443,11 +475,12 @@ public class FalconClient {
         if (colo != null) {
             resource = resource.queryParam("colo", colo);
         }
-        ClientResponse clientResponse = resource.header(REMOTE_USER, USER)
+        ClientResponse clientResponse = resource
+                .header("Cookie", AUTH_COOKIE_EQ + authenticationToken)
                 .accept(entities.mimeType).type(MediaType.TEXT_XML)
                 .method(entities.method, ClientResponse.class);
 
-        checkIfSuccessfull(clientResponse);
+        checkIfSuccessful(clientResponse);
 
         return parseAPIResult(clientResponse);
     }
@@ -455,24 +488,26 @@ public class FalconClient {
     private String sendDefinitionRequest(Entities entities, String entityType,
                                          String entityName) throws FalconCLIException {
 
-        ClientResponse clientResponse = service.path(entities.path)
-                .path(entityType).path(entityName).header(REMOTE_USER, USER)
+        ClientResponse clientResponse = service
+                .path(entities.path).path(entityType).path(entityName)
+                .header("Cookie", AUTH_COOKIE_EQ + authenticationToken)
                 .accept(entities.mimeType).type(MediaType.TEXT_XML)
                 .method(entities.method, ClientResponse.class);
 
-        checkIfSuccessfull(clientResponse);
+        checkIfSuccessful(clientResponse);
         return clientResponse.getEntity(String.class);
     }
 
     private String sendDependencyRequest(Entities entities, String entityType,
                                          String entityName) throws FalconCLIException {
 
-        ClientResponse clientResponse = service.path(entities.path)
-                .path(entityType).path(entityName).header(REMOTE_USER, USER)
+        ClientResponse clientResponse = service
+                .path(entities.path).path(entityType).path(entityName)
+                .header("Cookie", AUTH_COOKIE_EQ + authenticationToken)
                 .accept(entities.mimeType).type(MediaType.TEXT_XML)
                 .method(entities.method, ClientResponse.class);
 
-        checkIfSuccessfull(clientResponse);
+        checkIfSuccessful(clientResponse);
 
         return parseEntityList(clientResponse);
     }
@@ -480,12 +515,13 @@ public class FalconClient {
     private String sendListRequest(Entities entities, String entityType)
         throws FalconCLIException {
 
-        ClientResponse clientResponse = service.path(entities.path)
-                .path(entityType).header(REMOTE_USER, USER)
+        ClientResponse clientResponse = service
+                .path(entities.path).path(entityType)
+                .header("Cookie", AUTH_COOKIE_EQ + authenticationToken)
                 .accept(entities.mimeType).type(MediaType.TEXT_XML)
                 .method(entities.method, ClientResponse.class);
 
-        checkIfSuccessfull(clientResponse);
+        checkIfSuccessful(clientResponse);
 
         return parseEntityList(clientResponse);
     }
@@ -497,29 +533,16 @@ public class FalconClient {
         if (colo != null) {
             resource = resource.queryParam("colo", colo);
         }
-        ClientResponse clientResponse = resource.header(REMOTE_USER, USER)
+        ClientResponse clientResponse = resource
+                .header("Cookie", AUTH_COOKIE_EQ + authenticationToken)
                 .accept(entities.mimeType).type(MediaType.TEXT_XML)
                 .method(entities.method, ClientResponse.class, requestObject);
 
-        checkIfSuccessfull(clientResponse);
+        checkIfSuccessful(clientResponse);
 
         return parseAPIResult(clientResponse);
     }
 
-    public InstancesResult instanceCmd(Instances instances, String type, String name,
-                                       String start, String end, String colo) {
-        WebResource resource = service.path(instances.path).path(type).path(name);
-        resource = resource.queryParam("start", start);
-        if (end != null) {
-            resource = resource.queryParam("end", end);
-        }
-        resource = resource.queryParam("colo", colo);
-
-        return resource.header(REMOTE_USER, USER)
-                .accept(instances.mimeType)
-                .method(instances.method, InstancesResult.class);
-    }
-
     //SUSPEND CHECKSTYLE CHECK VisibilityModifierCheck
     private String sendInstanceRequest(Instances instances, String type,
                                        String entity, String start, String end, InputStream props,
@@ -541,15 +564,17 @@ public class FalconClient {
 
         ClientResponse clientResponse;
         if (props == null) {
-            clientResponse = resource.header(REMOTE_USER, USER)
+            clientResponse = resource
+                    .header("Cookie", AUTH_COOKIE_EQ + authenticationToken)
                     .accept(instances.mimeType)
                     .method(instances.method, ClientResponse.class);
         } else {
-            clientResponse = resource.header(REMOTE_USER, USER)
+            clientResponse = resource
+                    .header("Cookie", AUTH_COOKIE_EQ + authenticationToken)
                     .accept(instances.mimeType)
                     .method(instances.method, ClientResponse.class, props);
         }
-        checkIfSuccessfull(clientResponse);
+        checkIfSuccessful(clientResponse);
 
         if (instances.name().equals("LOG")) {
             return parseProcessInstanceResultLogs(clientResponse, runid);
@@ -566,8 +591,10 @@ public class FalconClient {
         throws FalconCLIException {
 
         ClientResponse clientResponse = service.path(job.path)
-                .header(REMOTE_USER, USER).accept(job.mimeType)
-                .type(MediaType.TEXT_PLAIN).method(job.method, ClientResponse.class);
+                .header("Cookie", AUTH_COOKIE_EQ + authenticationToken)
+                .accept(job.mimeType)
+                .type(job.mimeType)
+                .method(job.method, ClientResponse.class);
         return parseStringResult(clientResponse);
     }
 
@@ -720,7 +747,7 @@ public class FalconClient {
         return sb.toString();
     }
 
-    private void checkIfSuccessfull(ClientResponse clientResponse)
+    private void checkIfSuccessful(ClientResponse clientResponse)
         throws FalconCLIException {
 
         if (clientResponse.getStatus() == Response.Status.BAD_REQUEST

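The client-side change above swaps the REMOTE_USER header for a "hadoop.auth"
cookie carrying a signed authentication token. A minimal sketch of how such a
token is typically obtained with the hadoop-auth client API (the URL is
illustrative; Falcon's actual negotiation code is not shown in this hunk):

    import java.net.URL;
    import org.apache.hadoop.security.authentication.client.AuthenticatedURL;

    public final class AuthTokenSketch {
        private AuthTokenSketch() {}

        // Runs the SPNEGO/pseudo handshake against the server; the token is
        // populated as a side effect and its string form is what goes into
        // the "hadoop.auth=" cookie on subsequent requests.
        public static String fetchToken(String serviceUrl) throws Exception {
            AuthenticatedURL.Token token = new AuthenticatedURL.Token();
            new AuthenticatedURL().openConnection(new URL(serviceUrl), token);
            return token.toString();
        }
    }
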
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index 068a22c..c55c989 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -153,6 +153,11 @@
         </dependency>
 
         <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-all</artifactId>
+        </dependency>
+
+        <dependency>
             <groupId>net.sourceforge.findbugs</groupId>
             <artifactId>annotations</artifactId>
         </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/catalog/AbstractCatalogService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/catalog/AbstractCatalogService.java b/common/src/main/java/org/apache/falcon/catalog/AbstractCatalogService.java
index 691d805..fc9c3b1 100644
--- a/common/src/main/java/org/apache/falcon/catalog/AbstractCatalogService.java
+++ b/common/src/main/java/org/apache/falcon/catalog/AbstractCatalogService.java
@@ -32,11 +32,13 @@ public abstract class AbstractCatalogService {
     /**
      * This method checks if the catalog service is alive.
      *
-     * @param catalogBaseUrl url for the catalog service
+     * @param catalogUrl url for the catalog service
+     * @param metaStorePrincipal kerberos principal for the hive metastore, since this runs in Falcon on behalf of the user
      * @return if the service was reachable
      * @throws FalconException exception
      */
-    public abstract boolean isAlive(String catalogBaseUrl) throws FalconException;
+    public abstract boolean isAlive(String catalogUrl,
+                                    String metaStorePrincipal) throws FalconException;
 
     /**
      * This method checks if the given table exists in the catalog.
@@ -44,14 +46,15 @@ public abstract class AbstractCatalogService {
      * @param catalogUrl url for the catalog service
      * @param database database the table belongs to
      * @param tableName tableName to check if it exists
+     * @param metaStorePrincipal kerberos principal for the hive metastore, since this runs in Falcon on behalf of the user
      * @return if the table exists
      * @throws FalconException exception
      */
-    public abstract boolean tableExists(String catalogUrl, String database, String tableName)
-        throws FalconException;
+    public abstract boolean tableExists(String catalogUrl, String database, String tableName,
+                                        String metaStorePrincipal) throws FalconException;
 
     /**
-     * Returns if the table is external or not.
+     * Returns whether the table is external. Executed in the workflow engine.
      *
      * @param catalogUrl url for the catalog service
      * @param database database the table belongs to
@@ -63,7 +66,7 @@ public abstract class AbstractCatalogService {
                                             String tableName) throws FalconException;
 
     /**
-     * List partitions by filter.
+     * List partitions by filter. Executed in the workflow engine.
      *
      * @param catalogUrl url for the catalog service
      * @param database database the table belongs to
@@ -79,7 +82,7 @@ public abstract class AbstractCatalogService {
         throws FalconException;
 
     /**
-     * Drops a given partition.
+     * Drops a given partition. Executed in the workflow engine.
      *
      * @param catalogUrl url for the catalog service
      * @param database database the table belongs to
@@ -92,7 +95,7 @@ public abstract class AbstractCatalogService {
                                            Map<String, String> partitions) throws FalconException;
 
     /**
-     * Gets the partition.
+     * Gets the partition. Executed in the workflow engine.
      *
      * @param catalogUrl url for the catalog service
      * @param database database the table belongs to

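With the signature changes above, catalog callers must now thread the
metastore principal through. A hypothetical caller, for illustration only
(URL and principal values are made up; the principal may be null when
security is disabled, as the cluster parser passes it through unchecked):

    import org.apache.falcon.catalog.AbstractCatalogService;
    import org.apache.falcon.catalog.CatalogServiceFactory;

    public final class CatalogAliveCheck {
        public static void main(String[] args) throws Exception {
            AbstractCatalogService catalog = CatalogServiceFactory.getCatalogService();
            boolean alive = catalog.isAlive("thrift://metastore.example.com:9083",
                    "hive/_HOST@EXAMPLE.COM"); // illustrative principal
            System.out.println("Metastore reachable: " + alive);
        }
    }
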
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java b/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java
index 51e4d6e..3c3660e 100644
--- a/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java
+++ b/common/src/main/java/org/apache/falcon/catalog/HiveCatalogService.java
@@ -23,6 +23,7 @@ import org.apache.falcon.entity.ClusterHelper;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hcatalog.api.HCatClient;
 import org.apache.hcatalog.api.HCatDatabase;
 import org.apache.hcatalog.api.HCatPartition;
@@ -32,6 +33,8 @@ import org.apache.hcatalog.common.HCatException;
 import org.apache.hcatalog.data.schema.HCatFieldSchema;
 import org.apache.log4j.Logger;
 
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -67,63 +70,94 @@ public class HiveCatalogService extends AbstractCatalogService {
 
     private static HCatClient getHCatClient(String metastoreUrl) throws FalconException {
         try {
-            HiveConf hcatConf = new HiveConf();
-            hcatConf.set("hive.metastore.local", "false");
-            hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUrl);
-            hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
-            hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
-                    HCatSemanticAnalyzer.class.getName());
-            hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
-
-            hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
-            hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-
+            HiveConf hcatConf = createHiveConf(metastoreUrl);
             return HCatClient.create(hcatConf);
         } catch (HCatException e) {
-            throw new FalconException(e);
+            throw new FalconException("Exception creating HCatClient: " + e.getMessage(), e);
         }
     }
 
+    private static HiveConf createHiveConf(String metastoreUrl) {
+        HiveConf hcatConf = new HiveConf();
+        hcatConf.set("hive.metastore.local", "false");
+        hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUrl);
+        hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
+        hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
+                HCatSemanticAnalyzer.class.getName());
+        hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+
+        hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+        hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+        return hcatConf;
+    }
+
+    public static synchronized HCatClient getProxiedClient(String catalogUrl,
+                                                           String metaStorePrincipal) throws FalconException {
+        if (!CACHE.containsKey(catalogUrl)) {
+            try {
+                final HiveConf hcatConf = createHiveConf(catalogUrl);
+                if (UserGroupInformation.isSecurityEnabled()) {
+                    hcatConf.set(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname, metaStorePrincipal);
+                    hcatConf.set(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname, "true");
+                }
+
+                LOG.info("Creating and caching HCatalog client object for " + catalogUrl);
+                UserGroupInformation currentUser = UserGroupInformation.getLoginUser();
+                HCatClient hcatClient = currentUser.doAs(new PrivilegedExceptionAction<HCatClient>() {
+                    public HCatClient run() throws Exception {
+                        return HCatClient.create(hcatConf);
+                    }
+                });
+                CACHE.putIfAbsent(catalogUrl, hcatClient);
+            } catch (IOException e) {
+                throw new FalconException("Exception creating Proxied HCatClient: " + e.getMessage(), e);
+            } catch (InterruptedException e) {
+                throw new FalconException("Exception creating Proxied HCatClient: " + e.getMessage(), e);
+            }
+        }
+
+        return CACHE.get(catalogUrl);
+    }
 
     @Override
-    public boolean isAlive(String catalogBaseUrl) throws FalconException {
-        LOG.info("Checking if the service is alive for: " + catalogBaseUrl);
+    public boolean isAlive(final String catalogUrl,
+                           final String metaStorePrincipal) throws FalconException {
+        LOG.info("Checking if the service is alive for: " + catalogUrl);
 
         try {
-            HCatClient client = get(catalogBaseUrl);
-            client.close();
+            HCatClient client = getProxiedClient(catalogUrl, metaStorePrincipal);
             HCatDatabase database = client.getDatabase("default");
             return database != null;
         } catch (HCatException e) {
-            throw new FalconException(e);
+            throw new FalconException("Exception checking if the service is alive:" + e.getMessage(), e);
         }
     }
 
     @Override
-    public boolean tableExists(String catalogUrl, String database, String tableName)
-        throws FalconException {
+    public boolean tableExists(final String catalogUrl, final String database, final String tableName,
+                               final String metaStorePrincipal) throws FalconException {
         LOG.info("Checking if the table exists: " + tableName);
 
         try {
-            HCatClient client = get(catalogUrl);
+            HCatClient client = getProxiedClient(catalogUrl, metaStorePrincipal);
             HCatTable table = client.getTable(database, tableName);
             return table != null;
         } catch (HCatException e) {
-            throw new FalconException(e);
+            throw new FalconException("Exception checking if the table exists:" + e.getMessage(), e);
         }
     }
 
     @Override
     public boolean isTableExternal(String catalogUrl, String database, String tableName)
         throws FalconException {
-        LOG.info("Returns a list of table properties for:" + tableName);
+        LOG.info("Checking if the table is external:" + tableName);
 
         try {
             HCatClient client = get(catalogUrl);
             HCatTable table = client.getTable(database, tableName);
             return !table.getTabletype().equals("MANAGED_TABLE");
         } catch (HCatException e) {
-            throw new FalconException(e);
+            throw new FalconException("Exception checking if the table is external:" + e.getMessage(), e);
         }
     }
 
@@ -145,7 +179,7 @@ public class HiveCatalogService extends AbstractCatalogService {
 
             return catalogPartitionList;
         } catch (HCatException e) {
-            throw new FalconException(e);
+            throw new FalconException("Exception listing partitions:" + e.getMessage(), e);
         }
     }
 
@@ -180,7 +214,7 @@ public class HiveCatalogService extends AbstractCatalogService {
             HCatClient client = get(catalogUrl);
             client.dropPartitions(database, tableName, partitions, true);
         } catch (HCatException e) {
-            throw new FalconException(e);
+            throw new FalconException("Exception dropping partitions:" + e.getMessage(), e);
         }
 
         return true;
@@ -189,14 +223,14 @@ public class HiveCatalogService extends AbstractCatalogService {
     @Override
     public CatalogPartition getPartition(String catalogUrl, String database, String tableName,
                                          Map<String, String> partitionSpec) throws FalconException {
-        LOG.info("List partitions for : " + tableName + ", partition spec: " + partitionSpec);
+        LOG.info("Fetch partition for : " + tableName + ", partition spec: " + partitionSpec);
 
         try {
             HCatClient client = get(catalogUrl);
             HCatPartition hCatPartition = client.getPartition(database, tableName, partitionSpec);
             return createCatalogPartition(hCatPartition);
         } catch (HCatException e) {
-            throw new FalconException(e);
+            throw new FalconException("Exception fetching partition:" + e.getMessage(), e);
         }
     }
 }

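The new getProxiedClient above caches one HCatClient per catalog URL and
creates it inside a UserGroupInformation.doAs block, so the metastore SASL
handshake happens as the Falcon login user. A hypothetical usage sketch (both
arguments are illustrative; in Falcon they come from the cluster entity):

    import org.apache.falcon.catalog.HiveCatalogService;
    import org.apache.hcatalog.api.HCatClient;

    public final class ProxiedClientUsage {
        public static void main(String[] args) throws Exception {
            HCatClient client = HiveCatalogService.getProxiedClient(
                    "thrift://metastore.example.com:9083", "hive/_HOST@EXAMPLE.COM");
            // Subsequent calls for the same URL reuse the cached client.
            System.out.println(client.getDatabase("default").getName());
        }
    }
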
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java b/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java
index 644afd2..20d46c3 100644
--- a/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java
+++ b/common/src/main/java/org/apache/falcon/cleanup/AbstractCleanupHandler.java
@@ -26,9 +26,9 @@ import org.apache.falcon.entity.v0.Frequency;
 import org.apache.falcon.entity.v0.Frequency.TimeUnit;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.expression.ExpressionHelper;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.util.RuntimeProperties;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -44,8 +44,8 @@ import java.io.IOException;
  */
 public abstract class AbstractCleanupHandler {
 
-    protected static final Logger LOG = Logger
-            .getLogger(AbstractCleanupHandler.class);
+    protected static final Logger LOG = Logger.getLogger(AbstractCleanupHandler.class);
+
     protected static final ConfigurationStore STORE = ConfigurationStore.get();
     public static final ExpressionEvaluator EVALUATOR = new ExpressionEvaluatorImpl();
     public static final ExpressionHelper RESOLVER = ExpressionHelper.get();
@@ -66,7 +66,6 @@ public abstract class AbstractCleanupHandler {
     private String getRetentionValue(Frequency.TimeUnit timeunit) {
         return RuntimeProperties.get().getProperty(
                 "log.cleanup.frequency." + timeunit + ".retention", "days(1)");
-
     }
 
     protected FileStatus[] getAllLogs(org.apache.falcon.entity.v0.cluster.Cluster cluster, Entity entity)
@@ -87,14 +86,7 @@ public abstract class AbstractCleanupHandler {
     protected FileSystem getFileSystem(org.apache.falcon.entity.v0.cluster.Cluster cluster)
         throws FalconException {
 
-        FileSystem fs;
-        try {
-            fs = new Path(ClusterHelper.getStorageUrl(cluster))
-                    .getFileSystem(new Configuration());
-        } catch (IOException e) {
-            throw new FalconException(e);
-        }
-        return fs;
+        return HadoopClientFactory.get().createFileSystem(ClusterHelper.getConfiguration(cluster));
     }
 
     protected void delete(Cluster cluster, Entity entity, long retention)
@@ -116,8 +108,7 @@ public abstract class AbstractCleanupHandler {
         for (FileStatus log : logs) {
             if (now - log.getModificationTime() > retention) {
                 try {
-                    boolean isDeleted = getFileSystem(cluster).delete(
-                            log.getPath(), true);
+                    boolean isDeleted = getFileSystem(cluster).delete(log.getPath(), true);
                     if (!isDeleted) {
                         LOG.error("Unable to delete path: " + log.getPath());
                     } else {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java b/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java
index 7dbac58..58d2199 100644
--- a/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java
+++ b/common/src/main/java/org/apache/falcon/cleanup/FeedCleanupHandler.java
@@ -56,7 +56,7 @@ public class FeedCleanupHandler extends AbstractCleanupHandler {
                     delete(currentCluster, feed, retention);
                     deleteStagedData(currentCluster, feed, retention);
                 } else {
-                    LOG.info("Ignoring cleanup for process:" + feedName
+                    LOG.info("Ignoring cleanup for feed:" + feedName
                             + " in  cluster: " + cluster.getName() + " as this does not belong to current colo");
                 }
             }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/entity/CatalogStorage.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/CatalogStorage.java b/common/src/main/java/org/apache/falcon/entity/CatalogStorage.java
index 32f7605..ed9b238 100644
--- a/common/src/main/java/org/apache/falcon/entity/CatalogStorage.java
+++ b/common/src/main/java/org/apache/falcon/entity/CatalogStorage.java
@@ -19,7 +19,6 @@
 package org.apache.falcon.entity;
 
 import org.apache.falcon.FalconException;
-import org.apache.falcon.catalog.CatalogServiceFactory;
 import org.apache.falcon.entity.common.FeedDataPath;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
@@ -325,11 +324,6 @@ public class CatalogStorage implements Storage {
     }
 
     @Override
-    public boolean exists() throws FalconException {
-        return CatalogServiceFactory.getCatalogService().tableExists(catalogUrl, database, table);
-    }
-
-    @Override
     public boolean isIdentical(Storage toCompareAgainst) throws FalconException {
         CatalogStorage catalogStorage = (CatalogStorage) toCompareAgainst;
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/entity/ClusterHelper.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/ClusterHelper.java b/common/src/main/java/org/apache/falcon/entity/ClusterHelper.java
index 38b5c5b..c0f3ee2 100644
--- a/common/src/main/java/org/apache/falcon/entity/ClusterHelper.java
+++ b/common/src/main/java/org/apache/falcon/entity/ClusterHelper.java
@@ -18,14 +18,11 @@
 
 package org.apache.falcon.entity;
 
-import org.apache.falcon.FalconException;
 import org.apache.falcon.entity.v0.cluster.*;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-import java.io.IOException;
-
 /**
  * Helper to get end points relating to the cluster.
  */
@@ -37,22 +34,21 @@ public final class ClusterHelper {
 
     public static Configuration getConfiguration(Cluster cluster) {
         Configuration conf = new Configuration();
-        conf.set("fs.default.name", getStorageUrl(cluster));
-        conf.set("mapred.job.tracker", getMREndPoint(cluster));
+
+        final String storageUrl = getStorageUrl(cluster);
+        conf.set(HadoopClientFactory.FS_DEFAULT_NAME_KEY, storageUrl);
+
+        final String executeEndPoint = getMREndPoint(cluster);
+        conf.set(HadoopClientFactory.MR_JOB_TRACKER_KEY, executeEndPoint);
+        conf.set(HadoopClientFactory.YARN_RM_ADDRESS_KEY, executeEndPoint);
+
         if (cluster.getProperties() != null) {
             for (Property prop : cluster.getProperties().getProperties()) {
                 conf.set(prop.getName(), prop.getValue());
             }
         }
-        return conf;
-    }
 
-    public static FileSystem getFileSystem(Cluster cluster) throws FalconException {
-        try {
-            return FileSystem.get(getConfiguration(cluster));
-        } catch (IOException e) {
-            throw new FalconException(e);
-        }
+        return conf;
     }
 
     public static String getOozieUrl(Cluster cluster) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/entity/EntityUtil.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/EntityUtil.java b/common/src/main/java/org/apache/falcon/entity/EntityUtil.java
index a3ad83d..b4bc07d 100644
--- a/common/src/main/java/org/apache/falcon/entity/EntityUtil.java
+++ b/common/src/main/java/org/apache/falcon/entity/EntityUtil.java
@@ -34,6 +34,7 @@ import org.apache.falcon.entity.v0.feed.ClusterType;
 import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.process.*;
 import org.apache.falcon.entity.v0.process.Process;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.util.DeploymentUtil;
 import org.apache.falcon.util.RuntimeProperties;
 import org.apache.hadoop.fs.FileStatus;
@@ -553,7 +554,7 @@ public final class EntityUtil {
     private static Path getStagingPath(org.apache.falcon.entity.v0.cluster.Cluster cluster, Path path)
         throws FalconException {
         try {
-            FileSystem fs = ClusterHelper.getFileSystem(cluster);
+            FileSystem fs = HadoopClientFactory.get().createFileSystem(ClusterHelper.getConfiguration(cluster));
             FileStatus latest = null;
             FileStatus[] files = fs.globStatus(path, new PathFilter() {
                 @Override

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/entity/FileSystemStorage.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/FileSystemStorage.java b/common/src/main/java/org/apache/falcon/entity/FileSystemStorage.java
index 68370c7..41917c8 100644
--- a/common/src/main/java/org/apache/falcon/entity/FileSystemStorage.java
+++ b/common/src/main/java/org/apache/falcon/entity/FileSystemStorage.java
@@ -170,12 +170,6 @@ public class FileSystemStorage implements Storage {
     }
 
     @Override
-    public boolean exists() throws FalconException {
-        // Directories on FS will be created if they don't exist.
-        return true;
-    }
-
-    @Override
     public boolean isIdentical(Storage toCompareAgainst) throws FalconException {
         FileSystemStorage fsStorage = (FileSystemStorage) toCompareAgainst;
         final List<Location> fsStorageLocations = fsStorage.getLocations();

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/entity/Storage.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/Storage.java b/common/src/main/java/org/apache/falcon/entity/Storage.java
index 0634969..60c87c5 100644
--- a/common/src/main/java/org/apache/falcon/entity/Storage.java
+++ b/common/src/main/java/org/apache/falcon/entity/Storage.java
@@ -65,15 +65,6 @@ public interface Storage {
     String getUriTemplate(LocationType locationType);
 
     /**
-     * Check if the storage, filesystem location or catalog table exists.
-     * Filesystem location always returns true.
-     *
-     * @return true if table exists else false
-     * @throws FalconException an exception
-     */
-    boolean exists() throws FalconException;
-
-    /**
      * Check for equality of this instance against the one in question.
      *
      * @param toCompareAgainst instance to compare

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java b/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java
index e633838..831bfdc 100644
--- a/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java
+++ b/common/src/main/java/org/apache/falcon/entity/parser/ClusterEntityParser.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 
 import javax.jms.ConnectionFactory;
 
+import org.apache.commons.lang.Validate;
 import org.apache.falcon.FalconException;
 import org.apache.falcon.catalog.CatalogServiceFactory;
 import org.apache.falcon.entity.ClusterHelper;
@@ -29,16 +30,17 @@ import org.apache.falcon.entity.EntityUtil;
 import org.apache.falcon.entity.store.StoreAccessException;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
 import org.apache.falcon.entity.v0.cluster.Interface;
+import org.apache.falcon.security.SecurityUtil;
 import org.apache.falcon.util.StartupProperties;
 import org.apache.falcon.workflow.WorkflowEngineFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.log4j.Logger;
+
 /**
  * Parser that parses cluster entity definition.
  */
@@ -51,8 +53,7 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
     }
 
     @Override
-    public void validate(Cluster cluster) throws StoreAccessException,
-                                                 ValidationException {
+    public void validate(Cluster cluster) throws StoreAccessException, ValidationException {
         // validating scheme in light of fail-early
         validateScheme(cluster, Interfacetype.READONLY);
         validateScheme(cluster, Interfacetype.WRITE);
@@ -88,23 +89,34 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
         final String readOnlyStorageUrl = ClusterHelper.getReadOnlyStorageUrl(cluster);
         LOG.info("Validating read interface: " + readOnlyStorageUrl);
 
-        validateFileSystem(readOnlyStorageUrl);
+        validateFileSystem(cluster, readOnlyStorageUrl);
     }
 
     private void validateWriteInterface(Cluster cluster) throws ValidationException {
         final String writeStorageUrl = ClusterHelper.getStorageUrl(cluster);
         LOG.info("Validating write interface: " + writeStorageUrl);
 
-        validateFileSystem(writeStorageUrl);
+        validateFileSystem(cluster, writeStorageUrl);
     }
 
-    private void validateFileSystem(String storageUrl) throws ValidationException {
+    private void validateFileSystem(Cluster cluster, String storageUrl) throws ValidationException {
         try {
             Configuration conf = new Configuration();
-            conf.set("fs.default.name", storageUrl);
+            conf.set(HadoopClientFactory.FS_DEFAULT_NAME_KEY, storageUrl);
             conf.setInt("ipc.client.connect.max.retries", 10);
-            FileSystem.get(conf);
-        } catch (IOException e) {
+
+            if (UserGroupInformation.isSecurityEnabled()) {
+                String nameNodePrincipal = ClusterHelper.getPropertyValue(cluster, SecurityUtil.NN_PRINCIPAL);
+                Validate.notEmpty(nameNodePrincipal,
+                    "Cluster definition missing required namenode credential property: " + SecurityUtil.NN_PRINCIPAL);
+
+                conf.set(SecurityUtil.NN_PRINCIPAL, nameNodePrincipal);
+            }
+
+            // todo: ideally check if the end user has access using createProxiedFileSystem
+            // hftp won't work and bug is logged at HADOOP-10215
+            HadoopClientFactory.get().createFileSystem(conf);
+        } catch (FalconException e) {
             throw new ValidationException("Invalid storage server or port: " + storageUrl, e);
         }
     }
@@ -114,11 +126,7 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
         LOG.info("Validating execute interface: " + executeUrl);
 
         try {
-            JobConf jobConf = new JobConf();
-            jobConf.set("mapred.job.tracker", executeUrl);
-            jobConf.set("yarn.resourcemanager.address", executeUrl);
-            JobClient jobClient = new JobClient(jobConf);
-            jobClient.getClusterStatus().getMapTasks();
+            HadoopClientFactory.validateJobClient(executeUrl);
         } catch (IOException e) {
             throw new ValidationException("Invalid Execute server or port: " + executeUrl, e);
         }
@@ -173,7 +181,15 @@ public class ClusterEntityParser extends EntityParser<Cluster> {
         LOG.info("Validating catalog registry interface: " + catalogUrl);
 
         try {
-            if (!CatalogServiceFactory.getCatalogService().isAlive(catalogUrl)) {
+            String metaStorePrincipal = null;
+            if (UserGroupInformation.isSecurityEnabled()) {
+                metaStorePrincipal = ClusterHelper.getPropertyValue(cluster, SecurityUtil.HIVE_METASTORE_PRINCIPAL);
+                Validate.notEmpty(metaStorePrincipal,
+                        "Cluster definition missing required metastore credential property: "
+                                + SecurityUtil.HIVE_METASTORE_PRINCIPAL);
+            }
+
+            if (!CatalogServiceFactory.getCatalogService().isAlive(catalogUrl, metaStorePrincipal)) {
                 throw new ValidationException("Unable to reach Catalog server:" + catalogUrl);
             }
         } catch (FalconException e) {

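Given the validations above, a secure cluster entity must now carry the
namenode and metastore principals as cluster properties. An illustrative
fragment, assuming SecurityUtil.NN_PRINCIPAL and
SecurityUtil.HIVE_METASTORE_PRINCIPAL resolve to the standard Hadoop and Hive
configuration keys (the principal values are placeholders):

    <properties>
        <property name="dfs.namenode.kerberos.principal" value="nn/_HOST@EXAMPLE.COM"/>
        <property name="hive.metastore.kerberos.principal" value="hive/_HOST@EXAMPLE.COM"/>
    </properties>
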
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java b/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
index 5c1d9ad..d138179 100644
--- a/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
+++ b/common/src/main/java/org/apache/falcon/entity/parser/FeedEntityParser.java
@@ -20,7 +20,9 @@ package org.apache.falcon.entity.parser;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.falcon.FalconException;
+import org.apache.falcon.catalog.CatalogServiceFactory;
 import org.apache.falcon.entity.CatalogStorage;
+import org.apache.falcon.entity.ClusterHelper;
 import org.apache.falcon.entity.EntityUtil;
 import org.apache.falcon.entity.FeedHelper;
 import org.apache.falcon.entity.Storage;
@@ -38,6 +40,7 @@ import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.expression.ExpressionHelper;
 import org.apache.falcon.group.FeedGroup;
 import org.apache.falcon.group.FeedGroupMap;
+import org.apache.falcon.security.SecurityUtil;
 import org.apache.log4j.Logger;
 
 import java.util.Date;
@@ -344,6 +347,7 @@ public class FeedEntityParser extends EntityParser<Feed> {
     }
 
     private void validateStorageExists(Feed feed) throws FalconException {
+        StringBuilder buffer = new StringBuilder();
         for (Cluster cluster : feed.getClusters().getClusters()) {
             org.apache.falcon.entity.v0.cluster.Cluster clusterEntity =
                     EntityUtil.getEntity(EntityType.CLUSTER, cluster.getName());
@@ -352,12 +356,27 @@ public class FeedEntityParser extends EntityParser<Feed> {
             }
 
             final Storage storage = FeedHelper.createStorage(cluster, feed);
-            if (!storage.exists()) {
-                // this is only true for table, filesystem always returns true
-                CatalogStorage catalogStorage = (CatalogStorage) storage;
-                throw new ValidationException("Table [" + catalogStorage.getTable()
-                        + "] does not exist for feed: " + feed.getName() + ", cluster: " + cluster.getName());
+            // existence checks only apply to tables; filesystem locations are created if absent
+            if (storage.getType() == Storage.TYPE.FILESYSTEM) {
+                continue;
+            }
+
+            CatalogStorage catalogStorage = (CatalogStorage) storage;
+            String metaStorePrincipal = ClusterHelper.getPropertyValue(clusterEntity,
+                    SecurityUtil.HIVE_METASTORE_PRINCIPAL);
+            if (!CatalogServiceFactory.getCatalogService().tableExists(catalogStorage.getCatalogUrl(),
+                    catalogStorage.getDatabase(), catalogStorage.getTable(), metaStorePrincipal)) {
+                buffer.append("Table [")
+                        .append(catalogStorage.getTable())
+                        .append("] does not exist for feed: ")
+                        .append(feed.getName())
+                        .append(" in cluster: ")
+                        .append(cluster.getName());
             }
         }
+
+        if (buffer.length() > 0) {
+            throw new ValidationException(buffer.toString());
+        }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/entity/parser/ProcessEntityParser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/parser/ProcessEntityParser.java b/common/src/main/java/org/apache/falcon/entity/parser/ProcessEntityParser.java
index 8647d43..837b86a 100644
--- a/common/src/main/java/org/apache/falcon/entity/parser/ProcessEntityParser.java
+++ b/common/src/main/java/org/apache/falcon/entity/parser/ProcessEntityParser.java
@@ -41,6 +41,7 @@ import org.apache.falcon.entity.v0.process.LateInput;
 import org.apache.falcon.entity.v0.process.Output;
 import org.apache.falcon.entity.v0.process.Outputs;
 import org.apache.falcon.entity.v0.process.Process;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -97,6 +98,13 @@ public class ProcessEntityParser extends EntityParser<Process> {
         validateLateInputs(process);
     }
 
+    /**
+     * Validate if the user submitting this entity has access to the specific dirs on HDFS.
+     *
+     * @param process process
+     * @param clusterName cluster the process is materialized on
+     * @throws FalconException
+     */
     private void validateHDFSPaths(Process process, String clusterName) throws FalconException {
         org.apache.falcon.entity.v0.cluster.Cluster cluster = ConfigurationStore.get().get(EntityType.CLUSTER,
                 clusterName);
@@ -109,9 +117,8 @@ public class ProcessEntityParser extends EntityParser<Process> {
         String libPath = process.getWorkflow().getLib();
         String nameNode = getNameNode(cluster, clusterName);
         try {
-            Configuration configuration = new Configuration();
-            configuration.set("fs.default.name", nameNode);
-            FileSystem fs = FileSystem.get(configuration);
+            Configuration configuration = ClusterHelper.getConfiguration(cluster);
+            FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(configuration);
             if (!fs.exists(new Path(workflowPath))) {
                 throw new ValidationException(
                         "Workflow path: " + workflowPath + " does not exists in HDFS: " + nameNode);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java b/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java
index 18ceb6e..156fafe 100644
--- a/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java
+++ b/common/src/main/java/org/apache/falcon/entity/store/ConfigurationStore.java
@@ -21,14 +21,16 @@ package org.apache.falcon.entity.store;
 import org.apache.falcon.FalconException;
 import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.service.ConfigurationChangeListener;
 import org.apache.falcon.service.FalconService;
 import org.apache.falcon.util.ReflectionUtils;
 import org.apache.falcon.util.StartupProperties;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.log4j.Logger;
 
 import javax.xml.bind.JAXBException;
@@ -84,8 +86,26 @@ public final class ConfigurationStore implements FalconService {
 
         String uri = StartupProperties.get().getProperty("config.store.uri");
         storePath = new Path(uri);
+        fs = initializeFileSystem();
+    }
+
+    /**
+     * Falcon owns this dir on HDFS; no other user has permission to read it.
+     *
+     * @return FileSystem handle
+     */
+    private FileSystem initializeFileSystem() {
         try {
-            fs = FileSystem.get(storePath.toUri(), new Configuration());
+            FileSystem fileSystem = HadoopClientFactory.get().createFileSystem(storePath.toUri());
+            if (!fileSystem.exists(storePath)) {
+                LOG.info("Creating configuration store directory: " + storePath);
+                fileSystem.mkdirs(storePath);
+                // set permissions so config store dir is owned by falcon alone
+                FsPermission permission = new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE);
+                fileSystem.setPermission(storePath, permission);
+            }
+
+            return fileSystem;
         } catch (Exception e) {
             throw new RuntimeException("Unable to bring up config store", e);
         }
@@ -305,8 +325,7 @@ public final class ConfigurationStore implements FalconService {
         Path archivePath = new Path(storePath, "archive" + Path.SEPARATOR + type);
         fs.mkdirs(archivePath);
         fs.rename(new Path(storePath, type + Path.SEPARATOR + URLEncoder.encode(name, UTF_8) + ".xml"),
-                new Path(archivePath,
-                        URLEncoder.encode(name, UTF_8) + "." + System.currentTimeMillis()));
+                new Path(archivePath, URLEncoder.encode(name, UTF_8) + "." + System.currentTimeMillis()));
         LOG.info("Archived configuration " + type + "/" + name);
     }
 

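The permission set on the config store directory above is owner-only (700 in
octal terms), so entity definitions are readable by the falcon service user
alone. A minimal sketch of the same idiom in isolation:

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.fs.permission.FsPermission;

    public final class OwnerOnlyDir {
        private OwnerOnlyDir() {}

        // Creates the dir if needed and restricts it to the owner (rwx------).
        public static void createOwnerOnly(FileSystem fs, Path dir) throws Exception {
            fs.mkdirs(dir);
            fs.setPermission(dir, new FsPermission(FsAction.ALL, FsAction.NONE, FsAction.NONE));
        }
    }
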
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/hadoop/HadoopClientFactory.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/hadoop/HadoopClientFactory.java b/common/src/main/java/org/apache/falcon/hadoop/HadoopClientFactory.java
new file mode 100644
index 0000000..d5fbda8
--- /dev/null
+++ b/common/src/main/java/org/apache/falcon/hadoop/HadoopClientFactory.java
@@ -0,0 +1,202 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.hadoop;
+
+import org.apache.commons.lang.Validate;
+import org.apache.falcon.FalconException;
+import org.apache.falcon.security.CurrentUser;
+import org.apache.falcon.security.SecurityUtil;
+import org.apache.falcon.util.StartupProperties;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
+
+/**
+ * A factory implementation to dole out FileSystem handles based on the logged-in user.
+ */
+public final class HadoopClientFactory {
+
+    public static final String FS_DEFAULT_NAME_KEY = "fs.default.name";
+    public static final String MR_JOB_TRACKER_KEY = "mapred.job.tracker";
+    public static final String YARN_RM_ADDRESS_KEY = "yarn.resourcemanager.address";
+
+    private static final HadoopClientFactory INSTANCE = new HadoopClientFactory();
+
+    private HadoopClientFactory() {
+    }
+
+    public static HadoopClientFactory get() {
+        return INSTANCE;
+    }
+
+    /**
+     * This method is only used by Falcon internally to talk to the config store on HDFS.
+     *
+     * @param uri file system URI for config store.
+     * @return FileSystem handle created as the Falcon service (login) user.
+     * @throws org.apache.falcon.FalconException
+     *          if the filesystem could not be created.
+     */
+    public FileSystem createFileSystem(final URI uri) throws FalconException {
+        Validate.notNull(uri, "uri cannot be null");
+
+        try {
+            Configuration conf = new Configuration();
+            if (UserGroupInformation.isSecurityEnabled()) {
+                conf.set(SecurityUtil.NN_PRINCIPAL, StartupProperties.get().getProperty(SecurityUtil.NN_PRINCIPAL));
+            }
+
+            return createFileSystem(UserGroupInformation.getLoginUser(), uri, conf);
+        } catch (IOException e) {
+            throw new FalconException("Exception while getting FileSystem for: " + uri, e);
+        }
+    }
+
+    public FileSystem createFileSystem(final Configuration conf)
+        throws FalconException {
+        Validate.notNull(conf, "configuration cannot be null");
+
+        String nameNode = conf.get(FS_DEFAULT_NAME_KEY);
+        try {
+            return createFileSystem(UserGroupInformation.getLoginUser(), new URI(nameNode), conf);
+        } catch (URISyntaxException e) {
+            throw new FalconException("Exception while getting FileSystem for: " + nameNode, e);
+        } catch (IOException e) {
+            throw new FalconException("Exception while getting FileSystem for: " + nameNode, e);
+        }
+    }
+
+    public FileSystem createFileSystem(final URI uri, final Configuration conf)
+        throws FalconException {
+        Validate.notNull(uri, "uri cannot be null");
+
+        try {
+            return createFileSystem(UserGroupInformation.getLoginUser(), uri, conf);
+        } catch (IOException e) {
+            throw new FalconException("Exception while getting FileSystem for: " + uri, e);
+        }
+    }
+
+    public FileSystem createProxiedFileSystem(final Configuration conf)
+        throws FalconException {
+        Validate.notNull(conf, "configuration cannot be null");
+
+        String nameNode = conf.get(FS_DEFAULT_NAME_KEY);
+        try {
+            return createProxiedFileSystem(CurrentUser.getUser(), new URI(nameNode), conf);
+        } catch (URISyntaxException e) {
+            throw new FalconException("Exception while getting FileSystem for: " + nameNode, e);
+        }
+    }
+
+    /**
+     * Return a FileSystem created with the provided proxyUser for the specified URI.
+     *
+     * @param proxyUser proxyUser
+     * @param uri  file system URI.
+     * @param conf Configuration with all necessary information to create the FileSystem.
+     * @return FileSystem created with the provided proxyUser/group.
+     * @throws org.apache.falcon.FalconException
+     *          if the filesystem could not be created.
+     */
+    public FileSystem createProxiedFileSystem(String proxyUser, final URI uri, final Configuration conf)
+        throws FalconException {
+        Validate.notEmpty(proxyUser, "proxyUser cannot be null or empty");
+
+        try {
+            UserGroupInformation proxyUgi = SecurityUtil.getProxyUser(proxyUser);
+            return createFileSystem(proxyUgi, uri, conf);
+        } catch (IOException ex) {
+            throw new FalconException("Exception while getting FileSystem: " + ex.getMessage(), ex);
+        }
+    }
+
+    /**
+     * Return a FileSystem created with the provided user for the specified URI.
+     *
+     * @param ugi user group information
+     * @param uri  file system URI.
+     * @param conf Configuration with all necessary information to create the FileSystem.
+     * @return FileSystem created with the provided user/group.
+     * @throws org.apache.falcon.FalconException
+     *          if the filesystem could not be created.
+     */
+    @SuppressWarnings("ResultOfMethodCallIgnored")
+    public FileSystem createFileSystem(UserGroupInformation ugi, final URI uri, final Configuration conf)
+        throws FalconException {
+        Validate.notNull(ugi, "ugi cannot be null");
+        Validate.notNull(conf, "configuration cannot be null");
+
+        String nameNode = uri.getAuthority();
+        if (nameNode == null) {
+            nameNode = conf.get(FS_DEFAULT_NAME_KEY);
+            if (nameNode != null) {
+                try {
+                    new URI(nameNode).getAuthority();
+                } catch (URISyntaxException ex) {
+                    throw new FalconException("Exception while getting FileSystem", ex);
+                }
+            }
+        }
+
+        try {
+            return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+                public FileSystem run() throws Exception {
+                    return FileSystem.get(uri, conf);
+                }
+            });
+        } catch (InterruptedException ex) {
+            throw new FalconException("Exception creating FileSystem:" + ex.getMessage(), ex);
+        } catch (IOException ex) {
+            throw new FalconException("Exception creating FileSystem:" + ex.getMessage(), ex);
+        }
+    }
+
+    /**
+     * This method validates if the execute url is able to reach the MR endpoint.
+     *
+     * @param executeUrl JobTracker or ResourceManager endpoint URL
+     * @throws IOException
+     */
+    public static void validateJobClient(String executeUrl) throws IOException {
+        final JobConf jobConf = new JobConf();
+        jobConf.set(MR_JOB_TRACKER_KEY, executeUrl);
+        jobConf.set(YARN_RM_ADDRESS_KEY, executeUrl);
+
+        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
+        try {
+            JobClient jobClient = loginUser.doAs(new PrivilegedExceptionAction<JobClient>() {
+                public JobClient run() throws Exception {
+                    return new JobClient(jobConf);
+                }
+            });
+
+            jobClient.getClusterStatus().getMapTasks();
+        } catch (InterruptedException e) {
+            throw new IOException("Exception creating job client:" + e.getMessage(), e);
+        }
+    }
+}

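The factory above distinguishes Falcon-internal access (as the service login
user) from request-scoped access (proxying the authenticated end user via
CurrentUser). A hypothetical usage sketch with a made-up namenode URI; the
proxied call assumes CurrentUser.authenticate() was invoked earlier on the
same thread:

    import java.net.URI;
    import org.apache.falcon.hadoop.HadoopClientFactory;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public final class FactoryUsage {
        public static void main(String[] args) throws Exception {
            // Internal access, e.g. the config store: service login user.
            FileSystem internal = HadoopClientFactory.get()
                    .createFileSystem(new URI("hdfs://nn.example.com:8020"));
            System.out.println(internal.getUri());

            // Request-scoped access: executes as the authenticated caller.
            Configuration conf = new Configuration();
            conf.set(HadoopClientFactory.FS_DEFAULT_NAME_KEY, "hdfs://nn.example.com:8020");
            FileSystem asUser = HadoopClientFactory.get().createProxiedFileSystem(conf);
            System.out.println(asUser.getUri());
        }
    }
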
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java b/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java
new file mode 100644
index 0000000..264d5b8
--- /dev/null
+++ b/common/src/main/java/org/apache/falcon/security/AuthenticationInitializationService.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.security;
+
+import org.apache.commons.lang.Validate;
+import org.apache.falcon.FalconException;
+import org.apache.falcon.service.FalconService;
+import org.apache.falcon.util.StartupProperties;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.apache.log4j.Logger;
+
+import java.io.File;
+import java.util.Properties;
+
+
+/**
+ * Authentication Service at startup that initializes the authentication credentials
+ * based on the authentication type. If Kerberos is enabled, it logs in the service user with the keytab.
+ */
+public class AuthenticationInitializationService implements FalconService {
+
+    private static final Logger LOG = Logger.getLogger(AuthenticationInitializationService.class);
+
+    /**
+     * Constant for the configuration property that indicates the prefix.
+     */
+    protected static final String CONFIG_PREFIX = "falcon.service.authentication.";
+
+    /**
+     * Constant for the configuration property that indicates the keytab file path.
+     */
+    protected static final String KERBEROS_KEYTAB = CONFIG_PREFIX + KerberosAuthenticationHandler.KEYTAB;
+    /**
+     * Constant for the configuration property that indicates the kerberos principal.
+     */
+    protected static final String KERBEROS_PRINCIPAL = CONFIG_PREFIX + KerberosAuthenticationHandler.PRINCIPAL;
+
+
+    @Override
+    public String getName() {
+        return "Authentication initialization service";
+    }
+
+    @Override
+    public void init() throws FalconException {
+
+        if (SecurityUtil.isSecurityEnabled()) {
+            LOG.info("Falcon Kerberos Authentication Enabled!");
+            initializeKerberos();
+        } else {
+            LOG.info("Falcon Simple Authentication Enabled!");
+            Configuration ugiConf = new Configuration();
+            ugiConf.set("hadoop.security.authentication", "simple");
+            UserGroupInformation.setConfiguration(ugiConf);
+        }
+    }
+
+    protected void initializeKerberos() throws FalconException {
+        try {
+            Properties configuration = StartupProperties.get();
+            String principal = configuration.getProperty(KERBEROS_PRINCIPAL);
+            Validate.notEmpty(principal,
+                    "Missing required configuration property: " + KERBEROS_PRINCIPAL);
+            principal = org.apache.hadoop.security.SecurityUtil.getServerPrincipal(
+                    principal, SecurityUtil.getLocalHostName());
+
+            String keytabFilePath = configuration.getProperty(KERBEROS_KEYTAB);
+            Validate.notEmpty(keytabFilePath,
+                    "Missing required configuration property: " + KERBEROS_KEYTAB);
+            checkIsReadable(keytabFilePath);
+
+            Configuration conf = new Configuration();
+            conf.set("hadoop.security.authentication", "kerberos");
+
+            UserGroupInformation.setConfiguration(conf);
+            UserGroupInformation.loginUserFromKeytab(principal, keytabFilePath);
+
+            LOG.info("Got Kerberos ticket, keytab: " + keytabFilePath
+                    + ", Falcon principal principal: " + principal);
+        } catch (Exception ex) {
+            throw new FalconException("Could not initialize " + getName()
+                    + ": " + ex.getMessage(), ex);
+        }
+    }
+
+    private static void checkIsReadable(String keytabFilePath) {
+        File keytabFile = new File(keytabFilePath);
+        if (!keytabFile.exists()) {
+            throw new IllegalArgumentException("The keytab file does not exist! " + keytabFilePath);
+        }
+
+        if (!keytabFile.isFile()) {
+            throw new IllegalArgumentException("The keytab file cannot be a directory! " + keytabFilePath);
+        }
+
+        if (!keytabFile.canRead()) {
+            throw new IllegalArgumentException("The keytab file is not readable! " + keytabFilePath);
+        }
+    }
+
+    @Override
+    public void destroy() throws FalconException {
+    }
+}
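
For reference, a minimal sketch of how a deployment could verify the login this
service performs, assuming the two kerberos startup properties above are set;
UserGroupInformation.getLoginUser() and isFromKeytab() are standard Hadoop APIs:

    import org.apache.falcon.security.AuthenticationInitializationService;
    import org.apache.hadoop.security.UserGroupInformation;

    public class AuthInitSketch {
        public static void main(String[] args) throws Exception {
            // Assumes startup.properties provides falcon.service.authentication.kerberos.principal
            // and falcon.service.authentication.kerberos.keytab (values are site-specific).
            AuthenticationInitializationService service = new AuthenticationInitializationService();
            service.init(); // calls UserGroupInformation.loginUserFromKeytab when security is enabled

            UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
            System.out.println("Logged in as " + loginUser.getUserName()
                    + " (from keytab: " + loginUser.isFromKeytab() + ")");
        }
    }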

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/security/CurrentUser.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/security/CurrentUser.java b/common/src/main/java/org/apache/falcon/security/CurrentUser.java
index 4d2299e..cd7d0b0 100644
--- a/common/src/main/java/org/apache/falcon/security/CurrentUser.java
+++ b/common/src/main/java/org/apache/falcon/security/CurrentUser.java
@@ -37,8 +37,7 @@ public final class CurrentUser {
         return INSTANCE;
     }
 
-    private final ThreadLocal<Subject> currentSubject =
-            new ThreadLocal<Subject>();
+    private final ThreadLocal<Subject> currentSubject = new ThreadLocal<Subject>();
 
     public static void authenticate(String user) {
         if (user == null || user.isEmpty()) {
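
The class keeps the authenticated user in a thread-local Subject, so each
request-processing thread must call authenticate() before acting on a user's
behalf. A minimal usage sketch; getUser() is assumed here from its use
elsewhere in the codebase:

    import org.apache.falcon.security.CurrentUser;

    public class CurrentUserSketch {
        public static void main(String[] args) {
            // Bind the remote user to this thread before doing work on its behalf.
            CurrentUser.authenticate("someuser"); // hypothetical user name

            // Later, on the same thread, the bound user can be read back.
            System.out.println("Acting as: " + CurrentUser.getUser());
        }
    }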

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/security/FalconLoginModule.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/security/FalconLoginModule.java b/common/src/main/java/org/apache/falcon/security/FalconLoginModule.java
deleted file mode 100644
index d95e147..0000000
--- a/common/src/main/java/org/apache/falcon/security/FalconLoginModule.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.security;
-
-
-import com.sun.security.auth.UnixPrincipal;
-import org.apache.log4j.Logger;
-
-import javax.security.auth.Subject;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.login.LoginException;
-import javax.security.auth.spi.LoginModule;
-import java.security.Principal;
-import java.util.Map;
-
-/**
- * Falcon JAAS login module.
- */
-public class FalconLoginModule implements LoginModule {
-    private static final Logger LOG = Logger.getLogger(FalconLoginModule.class);
-
-    private Subject subject;
-
-    public Subject getSubject() {
-        return subject;
-    }
-
-    @Override
-    public boolean abort() throws LoginException {
-        return true;
-    }
-
-    private <T extends Principal> T getCanonicalUser(Class<T> cls) {
-        for (T user : subject.getPrincipals(cls)) {
-            return user;
-        }
-        return null;
-    }
-
-    @Override
-    public boolean commit() throws LoginException {
-        if (!subject.getPrincipals(SecurityConstants.OS_PRINCIPAL_CLASS).
-                isEmpty()) {
-            return true;
-        }
-
-        Principal user = getCanonicalUser(SecurityConstants.OS_PRINCIPAL_CLASS);
-        if (user != null) {
-            subject.getPrincipals().add(new UnixPrincipal(user.getName()));
-            return true;
-        }
-        LOG.error("No such user " + subject);
-        throw new LoginException("No such user " + subject);
-    }
-
-    //SUSPEND CHECKSTYLE CHECK HiddenFieldCheck
-    @Override
-    public void initialize(Subject subject, CallbackHandler callbackHandler,
-                           Map<String, ?> sharedState, Map<String, ?> options) {
-        this.subject = subject;
-    }
-    //RESUME CHECKSTYLE CHECK HiddenFieldCheck
-
-    @Override
-    public boolean login() throws LoginException {
-        return true;
-    }
-
-    @Override
-    public boolean logout() throws LoginException {
-        return true;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/security/FalconSecurityConfiguration.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/security/FalconSecurityConfiguration.java b/common/src/main/java/org/apache/falcon/security/FalconSecurityConfiguration.java
deleted file mode 100644
index b80ab6d..0000000
--- a/common/src/main/java/org/apache/falcon/security/FalconSecurityConfiguration.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.security;
-
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import java.util.HashMap;
-
-/**
- * Falcon JAAS security configuration.
- */
-public class FalconSecurityConfiguration extends Configuration {
-
-    private static final AppConfigurationEntry OS_SPECIFIC_LOGIN =
-            new AppConfigurationEntry(SecurityConstants.OS_LOGIN_MODULE_NAME,
-                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                    new HashMap<String, String>());
-
-    private static final AppConfigurationEntry[] SIMPLE_CONF =
-            new AppConfigurationEntry[]{OS_SPECIFIC_LOGIN};
-
-    private final Configuration parent;
-
-    public FalconSecurityConfiguration(Configuration parent) {
-        this.parent = parent;
-    }
-
-    @Override
-    public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
-        if (parent == null || appName.equals(SecurityConstants.FALCON_LOGIN)) {
-            return SIMPLE_CONF.clone();
-        } else {
-            return parent.getAppConfigurationEntry(appName);
-        }
-    }
-}


http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/process/src/main/java/org/apache/falcon/converter/OozieProcessMapper.java
----------------------------------------------------------------------
diff --git a/process/src/main/java/org/apache/falcon/converter/OozieProcessMapper.java b/process/src/main/java/org/apache/falcon/converter/OozieProcessMapper.java
index 87be709..6d0297e 100644
--- a/process/src/main/java/org/apache/falcon/converter/OozieProcessMapper.java
+++ b/process/src/main/java/org/apache/falcon/converter/OozieProcessMapper.java
@@ -41,6 +41,7 @@ import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.entity.v0.process.Property;
 import org.apache.falcon.entity.v0.process.Workflow;
 import org.apache.falcon.expression.ExpressionHelper;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.messaging.EntityInstanceMessage.ARG;
 import org.apache.falcon.oozie.coordinator.CONTROLS;
 import org.apache.falcon.oozie.coordinator.COORDINATORAPP;
@@ -84,20 +85,10 @@ public class OozieProcessMapper extends AbstractOozieEntityMapper<Process> {
         super(entity);
     }
 
-    private void mkdir(FileSystem fs, Path path) throws FalconException {
-        try {
-            if (!fs.exists(path) && !fs.mkdirs(path)) {
-                throw new FalconException("mkdir failed for " + path);
-            }
-        } catch (IOException e) {
-            throw new FalconException("mkdir failed for " + path, e);
-        }
-    }
-
     @Override
     protected List<COORDINATORAPP> getCoordinators(Cluster cluster, Path bundlePath) throws FalconException {
         try {
-            FileSystem fs = ClusterHelper.getFileSystem(cluster);
+            FileSystem fs = HadoopClientFactory.get().createFileSystem(ClusterHelper.getConfiguration(cluster));
             Process process = getEntity();
 
             //Copy user workflow and lib to staging dir
@@ -136,11 +127,11 @@ public class OozieProcessMapper extends AbstractOozieEntityMapper<Process> {
 
     private Path getUserWorkflowPath(Cluster cluster, Path bundlePath) throws FalconException {
         try {
-            FileSystem fs = FileSystem.get(ClusterHelper.getConfiguration(cluster));
+            FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(ClusterHelper.getConfiguration(cluster));
             Process process = getEntity();
             Path wfPath = new Path(process.getWorkflow().getPath());
             if (fs.isFile(wfPath)) {
-                return new Path(bundlePath, EntityUtil.PROCESS_USER_DIR + "/" + wfPath.getName().toString());
+                return new Path(bundlePath, EntityUtil.PROCESS_USER_DIR + "/" + wfPath.getName());
             } else {
                 return new Path(bundlePath, EntityUtil.PROCESS_USER_DIR);
             }
@@ -151,14 +142,15 @@ public class OozieProcessMapper extends AbstractOozieEntityMapper<Process> {
 
     private Path getUserLibPath(Cluster cluster, Path bundlePath) throws FalconException {
         try {
-            FileSystem fs = FileSystem.get(ClusterHelper.getConfiguration(cluster));
             Process process = getEntity();
             if (process.getWorkflow().getLib() == null) {
                 return null;
             }
             Path libPath = new Path(process.getWorkflow().getLib());
+
+            FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(ClusterHelper.getConfiguration(cluster));
             if (fs.isFile(libPath)) {
-                return new Path(bundlePath, EntityUtil.PROCESS_USERLIB_DIR + "/" + libPath.getName().toString());
+                return new Path(bundlePath, EntityUtil.PROCESS_USERLIB_DIR + "/" + libPath.getName());
             } else {
                 return new Path(bundlePath, EntityUtil.PROCESS_USERLIB_DIR);
             }
@@ -516,9 +508,9 @@ public class OozieProcessMapper extends AbstractOozieEntityMapper<Process> {
             if (engineType == EngineType.OOZIE && actionName.equals("user-oozie-workflow")) {
                 action.getSubWorkflow().setAppPath("${nameNode}" + userWfPath);
             } else if (engineType == EngineType.PIG && actionName.equals("user-pig-job")) {
-                decoratePIGAction(cluster, process, processWorkflow, action.getPig(), parentWfPath);
+                decoratePIGAction(cluster, process, action.getPig(), parentWfPath);
             } else if (engineType == EngineType.HIVE && actionName.equals("user-hive-job")) {
-                decorateHiveAction(cluster, process, processWorkflow, action, parentWfPath);
+                decorateHiveAction(cluster, process, action, parentWfPath);
             } else if (FALCON_ACTIONS.contains(actionName)) {
                 decorateWithOozieRetries(action);
             }
@@ -529,7 +521,7 @@ public class OozieProcessMapper extends AbstractOozieEntityMapper<Process> {
     }
 
     private void decoratePIGAction(Cluster cluster, Process process,
-                                   Workflow processWorkflow, PIG pigAction, Path parentWfPath) throws FalconException {
+                                   PIG pigAction, Path parentWfPath) throws FalconException {
         Path userWfPath = getUserWorkflowPath(cluster, parentWfPath.getParent());
         pigAction.setScript("${nameNode}" + userWfPath.toString());
 
@@ -548,11 +540,11 @@ public class OozieProcessMapper extends AbstractOozieEntityMapper<Process> {
             pigAction.getFile().add("${wf:appPath()}/conf/hive-site.xml");
         }
 
-        addArchiveForCustomJars(cluster, processWorkflow, pigAction.getArchive(),
+        addArchiveForCustomJars(cluster, pigAction.getArchive(),
                 getUserLibPath(cluster, parentWfPath.getParent()));
     }
 
-    private void decorateHiveAction(Cluster cluster, Process process, Workflow processWorkflow, ACTION wfAction,
+    private void decorateHiveAction(Cluster cluster, Process process, ACTION wfAction,
                                     Path parentWfPath) throws FalconException {
 
         JAXBElement<org.apache.falcon.oozie.hive.ACTION> actionJaxbElement = unMarshalHiveAction(wfAction);
@@ -571,7 +563,7 @@ public class OozieProcessMapper extends AbstractOozieEntityMapper<Process> {
 
         setupHiveConfiguration(cluster, parentWfPath, "falcon-");
 
-        addArchiveForCustomJars(cluster, processWorkflow, hiveAction.getArchive(),
+        addArchiveForCustomJars(cluster, hiveAction.getArchive(),
                 getUserLibPath(cluster, parentWfPath.getParent()));
 
         marshalHiveAction(wfAction, actionJaxbElement);
@@ -750,16 +742,16 @@ public class OozieProcessMapper extends AbstractOozieEntityMapper<Process> {
                                         String prefix) throws FalconException {
         String catalogUrl = ClusterHelper.getInterface(cluster, Interfacetype.REGISTRY).getEndpoint();
         try {
-            FileSystem fs = FileSystem.get(ClusterHelper.getConfiguration(cluster));
+            FileSystem fs = HadoopClientFactory.get().createFileSystem(ClusterHelper.getConfiguration(cluster));
             Path confPath = new Path(wfPath, "conf");
-            createHiveConf(fs, confPath, catalogUrl, prefix);
+            createHiveConf(fs, confPath, catalogUrl, cluster, prefix);
         } catch (IOException e) {
             throw new FalconException(e);
         }
     }
 
-    private void addArchiveForCustomJars(Cluster cluster, Workflow processWorkflow,
-                                         List<String> archiveList, Path libPath) throws FalconException {
+    private void addArchiveForCustomJars(Cluster cluster, List<String> archiveList,
+                                         Path libPath) throws FalconException {
         if (libPath == null) {
             return;
         }
@@ -814,5 +806,4 @@ public class OozieProcessMapper extends AbstractOozieEntityMapper<Process> {
             throw new RuntimeException("Unable to marshall hive action.", e);
         }
     }
-
 }
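
The recurring change in this file swaps direct FileSystem.get() calls for
HadoopClientFactory, which separates filesystems obtained as the falcon service
user from those obtained as the proxied end user. A rough sketch of the
distinction, using only the factory methods that appear in this patch:

    import org.apache.falcon.entity.ClusterHelper;
    import org.apache.falcon.entity.v0.cluster.Cluster;
    import org.apache.falcon.hadoop.HadoopClientFactory;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class FileSystemSketch {
        // Staging-dir reads and writes happen as the falcon service user.
        static FileSystem asServiceUser(Cluster cluster) throws Exception {
            Configuration conf = ClusterHelper.getConfiguration(cluster);
            return HadoopClientFactory.get().createFileSystem(conf);
        }

        // User-owned workflow and lib paths are read as the proxied end user.
        static FileSystem asEndUser(Cluster cluster) throws Exception {
            Configuration conf = ClusterHelper.getConfiguration(cluster);
            return HadoopClientFactory.get().createProxiedFileSystem(conf);
        }
    }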

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/process/src/main/resources/config/workflow/process-parent-workflow.xml
----------------------------------------------------------------------
diff --git a/process/src/main/resources/config/workflow/process-parent-workflow.xml b/process/src/main/resources/config/workflow/process-parent-workflow.xml
index 494bf20..f53c1e7 100644
--- a/process/src/main/resources/config/workflow/process-parent-workflow.xml
+++ b/process/src/main/resources/config/workflow/process-parent-workflow.xml
@@ -189,6 +189,8 @@
             <arg>${userWorkflowEngine}</arg>
             <arg>-logDir</arg>
             <arg>${logDir}/job-${nominalTime}/</arg>
+            <arg>-workflowUser</arg>
+            <arg>${wf:user()}</arg>
             <file>${wf:conf("falcon.libpath")}/activemq-core.jar</file>
             <file>${wf:conf("falcon.libpath")}/geronimo-j2ee-management.jar</file>
             <file>${wf:conf("falcon.libpath")}/jms.jar</file>
@@ -256,6 +258,8 @@
             <arg>${userWorkflowEngine}</arg>
             <arg>-logDir</arg>
             <arg>${logDir}/job-${nominalTime}/</arg>
+            <arg>-workflowUser</arg>
+            <arg>${wf:user()}</arg>
             <file>${wf:conf("falcon.libpath")}/activemq-core.jar</file>
             <file>${wf:conf("falcon.libpath")}/geronimo-j2ee-management.jar</file>
             <file>${wf:conf("falcon.libpath")}/jms.jar</file>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java
----------------------------------------------------------------------
diff --git a/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java b/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java
index 61ddbdc..b4c059a 100644
--- a/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java
+++ b/process/src/test/java/org/apache/falcon/converter/OozieProcessMapperTest.java
@@ -46,6 +46,7 @@ import org.apache.falcon.oozie.workflow.ACTION;
 import org.apache.falcon.oozie.workflow.DECISION;
 import org.apache.falcon.oozie.workflow.PIG;
 import org.apache.falcon.oozie.workflow.WORKFLOWAPP;
+import org.apache.falcon.security.CurrentUser;
 import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -82,6 +83,8 @@ public class OozieProcessMapperTest extends AbstractTestBase {
 
     @BeforeClass
     public void setUpDFS() throws Exception {
+        CurrentUser.authenticate("falcon");
+
         EmbeddedCluster cluster = EmbeddedCluster.newCluster("testCluster");
         Configuration conf = cluster.getConf();
         hdfsUrl = conf.get("fs.default.name");

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/main/java/org/apache/falcon/latedata/LateDataHandler.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/latedata/LateDataHandler.java b/rerun/src/main/java/org/apache/falcon/latedata/LateDataHandler.java
index 4b35760..f204b15 100644
--- a/rerun/src/main/java/org/apache/falcon/latedata/LateDataHandler.java
+++ b/rerun/src/main/java/org/apache/falcon/latedata/LateDataHandler.java
@@ -25,11 +25,14 @@ import org.apache.falcon.catalog.CatalogServiceFactory;
 import org.apache.falcon.entity.CatalogStorage;
 import org.apache.falcon.entity.FeedHelper;
 import org.apache.falcon.entity.Storage;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Logger;
@@ -125,10 +128,15 @@ public class LateDataHandler extends Configured implements Tool {
         return computedMetrics;
     }
 
-    private void persistMetrics(Map<String, Long> metrics, Path file) throws IOException {
+    private void persistMetrics(Map<String, Long> metrics, Path file) throws IOException, FalconException {
         OutputStream out = null;
         try {
-            out = file.getFileSystem(getConf()).create(file);
+            FileSystem fs = HadoopClientFactory.get().createFileSystem(file.toUri(), getConf());
+            out = fs.create(file);
+
+            // open up permissions so the falcon server can read this file, which is written as the workflow user
+            FsPermission permission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL);
+            fs.setPermission(file, permission);
 
             for (Map.Entry<String, Long> entry : metrics.entrySet()) {
                 out.write((entry.getKey() + "=" + entry.getValue() + "\n").getBytes());
@@ -191,7 +199,7 @@ public class LateDataHandler extends Configured implements Tool {
      * @throws IOException
      */
     private long getFileSystemUsageMetric(String pathGroup, Configuration conf)
-        throws IOException {
+        throws IOException, FalconException {
         long usage = 0;
         for (String pathElement : pathGroup.split(",")) {
             Path inPath = new Path(pathElement);
@@ -201,8 +209,8 @@ public class LateDataHandler extends Configured implements Tool {
         return usage;
     }
 
-    private long usage(Path inPath, Configuration conf) throws IOException {
-        FileSystem fs = inPath.getFileSystem(conf);
+    private long usage(Path inPath, Configuration conf) throws IOException, FalconException {
+        FileSystem fs = HadoopClientFactory.get().createFileSystem(inPath.toUri(), conf);
         FileStatus[] fileStatuses = fs.globStatus(inPath);
         if (fileStatuses == null || fileStatuses.length == 0) {
             return 0;
@@ -251,8 +259,8 @@ public class LateDataHandler extends Configured implements Tool {
         throws Exception {
 
         StringBuilder buffer = new StringBuilder();
-        BufferedReader in = new BufferedReader(new InputStreamReader(
-                file.getFileSystem(conf).open(file)));
+        FileSystem fs = HadoopClientFactory.get().createFileSystem(file.toUri(), conf);
+        BufferedReader in = new BufferedReader(new InputStreamReader(fs.open(file)));
         String line;
         try {
             Map<String, Long> recordedMetrics = new LinkedHashMap<String, Long>();
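
persistMetrics() now writes the metrics file as the workflow user but opens the
permissions wide so the falcon server, running as a different user, can read it
back. A small sketch of the permission used above; FsPermission is the standard
Hadoop API:

    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class PermissionSketch {
        public static void main(String[] args) {
            // ALL/ALL/ALL is octal 777: owner, group and others all get rwx,
            // so a process running as another user can still read the file.
            FsPermission wideOpen = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL);
            System.out.println(wideOpen); // prints rwxrwxrwx
        }
    }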

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/main/java/org/apache/falcon/rerun/event/LaterunEvent.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/event/LaterunEvent.java b/rerun/src/main/java/org/apache/falcon/rerun/event/LaterunEvent.java
index b5ac121..2b52762 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/event/LaterunEvent.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/event/LaterunEvent.java
@@ -23,10 +23,11 @@ package org.apache.falcon.rerun.event;
 public class LaterunEvent extends RerunEvent {
 
     //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck
-    public LaterunEvent(String clusterName, String wfId, long msgInsertTime, long delay,
-                        String entityType, String entityName, String instance, int runId) {
+    public LaterunEvent(String clusterName, String wfId, long msgInsertTime,
+                        long delay, String entityType, String entityName,
+                        String instance, int runId, String workflowUser) {
         super(clusterName, wfId, msgInsertTime, delay, entityType, entityName,
-                instance, runId);
+                instance, runId, workflowUser);
     }
     //RESUME CHECKSTYLE CHECK ParameterNumberCheck
 
@@ -37,6 +38,6 @@ public class LaterunEvent extends RerunEvent {
                 + "msgInsertTime=" + msgInsertTime + SEP + "delayInMilliSec="
                 + delayInMilliSec + SEP + "entityType=" + entityType + SEP
                 + "entityName=" + entityName + SEP + "instance=" + instance
-                + SEP + "runId=" + runId;
+                + SEP + "runId=" + runId + SEP + "workflowUser=" + workflowUser;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEvent.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEvent.java b/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEvent.java
index baf4601..254f285 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEvent.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEvent.java
@@ -38,6 +38,7 @@ public class RerunEvent implements Delayed {
 
     protected String clusterName;
     protected String wfId;
+    protected String workflowUser;
     protected long msgInsertTime;
     protected long delayInMilliSec;
     protected String entityType;
@@ -47,9 +48,10 @@ public class RerunEvent implements Delayed {
 
     //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck
     public RerunEvent(String clusterName, String wfId, long msgInsertTime, long delay,
-                      String entityType, String entityName, String instance, int runId) {
+                      String entityType, String entityName, String instance, int runId, String workflowUser) {
         this.clusterName = clusterName;
         this.wfId = wfId;
+        this.workflowUser = workflowUser;
         this.msgInsertTime = msgInsertTime;
         this.delayInMilliSec = delay;
         this.entityName = entityName;
@@ -67,6 +69,10 @@ public class RerunEvent implements Delayed {
         return wfId;
     }
 
+    public String getWorkflowUser() {
+        return workflowUser;
+    }
+
     public long getDelayInMilliSec() {
         return delayInMilliSec;
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEventFactory.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEventFactory.java b/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEventFactory.java
index 03230f9..c2a8fe2 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEventFactory.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/event/RerunEventFactory.java
@@ -45,7 +45,7 @@ public class RerunEventFactory<T extends RerunEvent> {
         return (T) new LaterunEvent(map.get("clusterName"), map.get("wfId"),
                 Long.parseLong(map.get("msgInsertTime")), Long.parseLong(map.get("delayInMilliSec")),
                 map.get("entityType"), map.get("entityName"), map.get("instance"),
-                Integer.parseInt(map.get("runId")));
+                Integer.parseInt(map.get("runId")), map.get("workflowUser"));
     }
 
     @SuppressWarnings("unchecked")
@@ -55,7 +55,7 @@ public class RerunEventFactory<T extends RerunEvent> {
                 Long.parseLong(map.get("msgInsertTime")), Long.parseLong(map.get("delayInMilliSec")),
                 map.get("entityType"), map.get("entityName"), map.get("instance"),
                 Integer.parseInt(map.get("runId")), Integer.parseInt(map.get("attempts")),
-                Integer.parseInt(map.get("failRetryCount")));
+                Integer.parseInt(map.get("failRetryCount")), map.get("workflowUser"));
     }
 
     private Map<String, String> getMap(String message) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/main/java/org/apache/falcon/rerun/event/RetryEvent.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/event/RetryEvent.java b/rerun/src/main/java/org/apache/falcon/rerun/event/RetryEvent.java
index 1396f19..b5312a6 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/event/RetryEvent.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/event/RetryEvent.java
@@ -28,9 +28,9 @@ public class RetryEvent extends RerunEvent {
     //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck
     public RetryEvent(String clusterName, String wfId, long msgInsertTime,
                       long delay, String entityType, String entityName, String instance,
-                      int runId, int attempts, int failRetryCount) {
+                      int runId, int attempts, int failRetryCount, String workflowUser) {
         super(clusterName, wfId, msgInsertTime, delay, entityType, entityName,
-                instance, runId);
+                instance, runId, workflowUser);
         this.attempts = attempts;
         this.failRetryCount = failRetryCount;
     }
@@ -56,7 +56,7 @@ public class RetryEvent extends RerunEvent {
                 + delayInMilliSec + SEP + "entityType=" + entityType + SEP
                 + "entityName=" + entityName + SEP + "instance=" + instance
                 + SEP + "runId=" + runId + SEP + "attempts=" + attempts + SEP
-                + "failRetryCount=" + failRetryCount;
+                + "failRetryCount=" + failRetryCount + SEP + "workflowUser=" + workflowUser;
     }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunConsumer.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunConsumer.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunConsumer.java
index b073117..ca2304e 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunConsumer.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunConsumer.java
@@ -24,6 +24,7 @@ import org.apache.falcon.rerun.event.RerunEvent;
 import org.apache.falcon.rerun.policy.AbstractRerunPolicy;
 import org.apache.falcon.rerun.policy.ExpBackoffPolicy;
 import org.apache.falcon.rerun.queue.DelayedQueue;
+import org.apache.falcon.security.CurrentUser;
 import org.apache.log4j.Logger;
 
 /**
@@ -51,7 +52,7 @@ public abstract class AbstractRerunConsumer<T extends RerunEvent, M extends Abst
         Frequency frequency = new Frequency("minutes(1)");
         while (true) {
             try {
-                T message = null;
+                T message;
                 try {
                     message = handler.takeFromQueue();
                     attempt = 1;
@@ -64,6 +65,8 @@ public abstract class AbstractRerunConsumer<T extends RerunEvent, M extends Abst
                     attempt++;
                     continue;
                 }
+
+                CurrentUser.authenticate(message.getWorkflowUser());
                 String jobStatus = handler.getWfEngine().getWorkflowStatus(
                         message.getClusterName(), message.getWfId());
                 handleRerun(message.getClusterName(), jobStatus, message);
@@ -72,8 +75,7 @@ public abstract class AbstractRerunConsumer<T extends RerunEvent, M extends Abst
                 LOG.error("Error in rerun consumer:", e);
             }
         }
-
     }
 
-    protected abstract void handleRerun(String cluster, String jobStatus, T message);
+    protected abstract void handleRerun(String clusterName, String jobStatus, T message);
 }
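
The consumer now re-establishes the workflow user's identity before querying
the workflow engine, so reruns happen as the user who owns the workflow rather
than as falcon. The hand-off pattern, sketched with the same calls the patch
introduces:

    import org.apache.falcon.security.CurrentUser;

    public class RerunAuthSketch {
        // Each queued event carries the workflow user; the consumer thread
        // authenticates as that user before touching the workflow engine.
        static void process(String workflowUser, String clusterName, String wfId) {
            CurrentUser.authenticate(workflowUser);
            // handler.getWfEngine().getWorkflowStatus(clusterName, wfId) and the
            // subsequent rerun now execute with this thread bound to workflowUser.
        }
    }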

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java
index ab7f472..0333918 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/AbstractRerunHandler.java
@@ -46,8 +46,11 @@ public abstract class AbstractRerunHandler<T extends RerunEvent, M extends Delay
         this.delayQueue.init();
     }
 
-    public abstract void handleRerun(String cluster, String entityType, String entityName,
-                                     String nominalTime, String runId, String wfId, long msgReceivedTime);
+    //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck
+    public abstract void handleRerun(String clusterName, String entityType,
+                                     String entityName, String nominalTime, String runId,
+                                     String wfId, String workflowUser, long msgReceivedTime);
+    //RESUME CHECKSTYLE CHECK ParameterNumberCheck
 
     public AbstractWorkflowEngine getWfEngine() {
         return wfEngine;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java
index fffd5cd..17f4337 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunConsumer.java
@@ -22,6 +22,7 @@ import org.apache.falcon.entity.EntityUtil;
 import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.SchemaHelper;
 import org.apache.falcon.entity.v0.process.LateInput;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.latedata.LateDataHandler;
 import org.apache.falcon.rerun.event.LaterunEvent;
 import org.apache.falcon.rerun.queue.DelayedQueue;
@@ -45,7 +46,7 @@ public class LateRerunConsumer<T extends LateRerunHandler<DelayedQueue<LaterunEv
     }
 
     @Override
-    protected void handleRerun(String cluster, String jobStatus,
+    protected void handleRerun(String clusterName, String jobStatus,
                                LaterunEvent message) {
         try {
             if (jobStatus.equals("RUNNING") || jobStatus.equals("PREP")
@@ -65,10 +66,9 @@ public class LateRerunConsumer<T extends LateRerunHandler<DelayedQueue<LaterunEv
                         + message.getWfId()
                         + " at "
                         + SchemaHelper.formatDateUTC(new Date()));
-                handler.handleRerun(cluster, message.getEntityType(),
-                        message.getEntityName(), message.getInstance(),
-                        Integer.toString(message.getRunId()),
-                        message.getWfId(), System.currentTimeMillis());
+                handler.handleRerun(clusterName, message.getEntityType(), message.getEntityName(),
+                        message.getInstance(), Integer.toString(message.getRunId()),
+                        message.getWfId(), message.getWorkflowUser(), System.currentTimeMillis());
                 return;
             }
 
@@ -78,18 +78,14 @@ public class LateRerunConsumer<T extends LateRerunHandler<DelayedQueue<LaterunEv
             LOG.info("Scheduled late rerun for wf-id: " + message.getWfId()
                     + " on cluster: " + message.getClusterName());
         } catch (Exception e) {
-            LOG.warn(
-                    "Late Re-run failed for instance "
+            LOG.warn("Late Re-run failed for instance "
                             + message.getEntityName() + ":"
                             + message.getInstance() + " after "
-                            + message.getDelayInMilliSec() + " with message:",
-                    e);
-            GenericAlert.alertLateRerunFailed(message.getEntityType(),
-                    message.getEntityName(), message.getInstance(),
-                    message.getWfId(), Integer.toString(message.getRunId()),
-                    e.getMessage());
+                            + message.getDelayInMilliSec() + " with message:", e);
+            GenericAlert.alertLateRerunFailed(message.getEntityType(), message.getEntityName(),
+                    message.getInstance(), message.getWfId(), message.getWorkflowUser(),
+                    Integer.toString(message.getRunId()), e.getMessage());
         }
-
     }
 
     public String detectLate(LaterunEvent message) throws Exception {
@@ -106,7 +102,7 @@ public class LateRerunConsumer<T extends LateRerunHandler<DelayedQueue<LaterunEv
 
         final String storageEndpoint = properties.getProperty(AbstractWorkflowEngine.NAME_NODE);
         Configuration conf = LateRerunHandler.getConfiguration(storageEndpoint);
-        FileSystem fs = FileSystem.get(conf);
+        FileSystem fs = HadoopClientFactory.get().createFileSystem(conf);
         if (!fs.exists(lateLogPath)) {
             LOG.warn("Late log file:" + lateLogPath + " not found:");
             return "";

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java
index 897e7ab..72f93cb 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/LateRerunHandler.java
@@ -29,13 +29,13 @@ import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.process.*;
 import org.apache.falcon.entity.v0.process.Process;
 import org.apache.falcon.expression.ExpressionHelper;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.rerun.event.LaterunEvent;
 import org.apache.falcon.rerun.policy.AbstractRerunPolicy;
 import org.apache.falcon.rerun.policy.RerunPolicyFactory;
 import org.apache.falcon.rerun.queue.DelayedQueue;
 import org.apache.falcon.workflow.engine.AbstractWorkflowEngine;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
@@ -50,8 +50,9 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
         AbstractRerunHandler<LaterunEvent, M> {
 
     @Override
-    public void handleRerun(String cluster, String entityType, String entityName,
-                            String nominalTime, String runId, String wfId, long msgReceivedTime) {
+    //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck
+    public void handleRerun(String cluster, String entityType, String entityName, String nominalTime,
+                            String runId, String wfId, String workflowUser, long msgReceivedTime) {
         try {
             Entity entity = EntityUtil.getEntity(entityType, entityName);
             try {
@@ -66,6 +67,7 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
                 LOG.error("Unable to get Late Process for entity:" + entityName);
                 return;
             }
+
             int intRunId = Integer.parseInt(runId);
             Date msgInsertTime = EntityUtil.parseDateUTC(nominalTime);
             Long wait = getEventDelay(entity, nominalTime);
@@ -81,7 +83,8 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
 
                 LOG.info("Going to delete path:" + lateLogPath);
                 final String storageEndpoint = properties.getProperty(AbstractWorkflowEngine.NAME_NODE);
-                FileSystem fs = FileSystem.get(getConfiguration(storageEndpoint));
+                Configuration conf = getConfiguration(storageEndpoint);
+                FileSystem fs = HadoopClientFactory.get().createFileSystem(conf);
                 if (fs.exists(lateLogPath)) {
                     boolean deleted = fs.delete(lateLogPath, true);
                     if (deleted) {
@@ -95,16 +98,17 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
                     + entityType + "(" + entityName + ")" + ":" + nominalTime
                     + " And WorkflowId: " + wfId);
             LaterunEvent event = new LaterunEvent(cluster, wfId, msgInsertTime.getTime(),
-                    wait, entityType, entityName, nominalTime, intRunId);
+                    wait, entityType, entityName, nominalTime, intRunId, workflowUser);
             offerToQueue(event);
         } catch (Exception e) {
             LOG.error("Unable to schedule late rerun for entity instance : "
                     + entityType + "(" + entityName + ")" + ":" + nominalTime
                     + " And WorkflowId: " + wfId, e);
             GenericAlert.alertLateRerunFailed(entityType, entityName,
-                    nominalTime, wfId, runId, e.getMessage());
+                    nominalTime, wfId, workflowUser, runId, e.getMessage());
         }
     }
+    //RESUME CHECKSTYLE CHECK ParameterNumberCheck
 
     private long getEventDelay(Entity entity, String nominalTime) throws FalconException {
 
@@ -217,7 +221,7 @@ public class LateRerunHandler<M extends DelayedQueue<LaterunEvent>> extends
 
     public static Configuration getConfiguration(String storageEndpoint) throws FalconException {
         Configuration conf = new Configuration();
-        conf.set(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY, storageEndpoint);
+        conf.set(HadoopClientFactory.FS_DEFAULT_NAME_KEY, storageEndpoint);
         return conf;
     }
 }
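
getConfiguration() builds a bare Configuration pointed at the given storage
endpoint; combined with HadoopClientFactory this is how the handler reaches the
late-log path. A short usage sketch with a hypothetical NameNode URL and path:

    import org.apache.falcon.hadoop.HadoopClientFactory;
    import org.apache.falcon.rerun.handler.LateRerunHandler;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class LateLogSketch {
        public static void main(String[] args) throws Exception {
            // Endpoint and path below are illustrative only.
            Configuration conf = LateRerunHandler.getConfiguration("hdfs://namenode:8020");
            FileSystem fs = HadoopClientFactory.get().createFileSystem(conf);
            System.out.println(fs.exists(new Path("/falcon/staging/latedata")));
        }
    }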

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java
index 63dade8..bb0b34a 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryConsumer.java
@@ -38,7 +38,7 @@ public class RetryConsumer<T extends RetryHandler<DelayedQueue<RetryEvent>>>
     }
 
     @Override
-    protected void handleRerun(String cluster, String jobStatus,
+    protected void handleRerun(String clusterName, String jobStatus,
                                RetryEvent message) {
         try {
             if (!jobStatus.equals("KILLED")) {
@@ -80,7 +80,7 @@ public class RetryConsumer<T extends RetryHandler<DelayedQueue<RetryEvent>>>
                     LOG.error("Unable to re-offer to queue:", ex);
                     GenericAlert.alertRetryFailed(message.getEntityType(),
                             message.getEntityName(), message.getInstance(),
-                            message.getWfId(),
+                            message.getWfId(), message.getWorkflowUser(),
                             Integer.toString(message.getRunId()),
                             ex.getMessage());
                 }
@@ -91,7 +91,7 @@ public class RetryConsumer<T extends RetryHandler<DelayedQueue<RetryEvent>>>
                                 + message.getInstance(), e);
                 GenericAlert.alertRetryFailed(message.getEntityType(),
                         message.getEntityName(), message.getInstance(),
-                        message.getWfId(),
+                        message.getWfId(), message.getWorkflowUser(),
                         Integer.toString(message.getRunId()),
                         "Failure retry attempts exhausted");
             }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java b/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java
index 2b41a7c..ef49c3a 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/handler/RetryHandler.java
@@ -38,8 +38,9 @@ public class RetryHandler<M extends DelayedQueue<RetryEvent>> extends
         AbstractRerunHandler<RetryEvent, M> {
 
     @Override
-    public void handleRerun(String cluster, String entityType, String entityName,
-                            String nominalTime, String runId, String wfId, long msgReceivedTime) {
+    //SUSPEND CHECKSTYLE CHECK ParameterNumberCheck
+    public void handleRerun(String clusterName, String entityType, String entityName, String nominalTime,
+                            String runId, String wfId, String workflowUser, long msgReceivedTime) {
         try {
             Entity entity = getEntity(entityType, entityName);
             Retry retry = getRetry(entity);
@@ -58,9 +59,9 @@ public class RetryHandler<M extends DelayedQueue<RetryEvent>> extends
             if (attempts > intRunId) {
                 AbstractRerunPolicy rerunPolicy = RerunPolicyFactory.getRetryPolicy(policy);
                 long delayTime = rerunPolicy.getDelay(delay, Integer.parseInt(runId));
-                RetryEvent event = new RetryEvent(cluster, wfId,
+                RetryEvent event = new RetryEvent(clusterName, wfId,
                         msgReceivedTime, delayTime, entityType, entityName,
-                        nominalTime, intRunId, attempts, 0);
+                        nominalTime, intRunId, attempts, 0, workflowUser);
                 offerToQueue(event);
             } else {
                 LOG.warn("All retry attempt failed out of configured: "
@@ -69,15 +70,17 @@ public class RetryHandler<M extends DelayedQueue<RetryEvent>> extends
                         + wfId);
 
                 GenericAlert.alertRetryFailed(entityType, entityName,
-                        nominalTime, wfId, runId,
+                        nominalTime, wfId, workflowUser, runId,
                         "All retry attempt failed out of configured: "
                                 + attempts + " attempt for entity instance::");
             }
         } catch (FalconException e) {
             LOG.error("Error during retry of entity instance " + entityName + ":" + nominalTime, e);
-            GenericAlert.alertRetryFailed(entityType, entityName, nominalTime, wfId, runId, e.getMessage());
+            GenericAlert.alertRetryFailed(entityType, entityName, nominalTime,
+                    wfId, workflowUser, runId, e.getMessage());
         }
     }
+    //RESUME CHECKSTYLE CHECK ParameterNumberCheck
 
     @Override
     public void init(M aDelayQueue) throws FalconException {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/main/java/org/apache/falcon/rerun/queue/InMemoryQueue.java
----------------------------------------------------------------------
diff --git a/rerun/src/main/java/org/apache/falcon/rerun/queue/InMemoryQueue.java b/rerun/src/main/java/org/apache/falcon/rerun/queue/InMemoryQueue.java
index 8234d8a..bc7c999 100644
--- a/rerun/src/main/java/org/apache/falcon/rerun/queue/InMemoryQueue.java
+++ b/rerun/src/main/java/org/apache/falcon/rerun/queue/InMemoryQueue.java
@@ -109,9 +109,8 @@ public class InMemoryQueue<T extends RerunEvent> extends DelayedQueue<T> {
         if (!retryFile.exists()) {
             LOG.warn("Rerun file deleted or renamed for process-instance: "
                     + event.getEntityName() + ":" + event.getInstance());
-            GenericAlert.alertRetryFailed(event.getEntityType(),
-                    event.getEntityName(), event.getInstance(),
-                    event.getWfId(), Integer.toString(event.getRunId()),
+            GenericAlert.alertRetryFailed(event.getEntityType(), event.getEntityName(), event.getInstance(),
+                    event.getWfId(), event.getWorkflowUser(), Integer.toString(event.getRunId()),
                     "Rerun file deleted or renamed for process-instance:");
         } else {
             if (!retryFile.delete()) {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/test/java/org/apache/falcon/rerun/handler/TestLateRerunHandler.java
----------------------------------------------------------------------
diff --git a/rerun/src/test/java/org/apache/falcon/rerun/handler/TestLateRerunHandler.java b/rerun/src/test/java/org/apache/falcon/rerun/handler/TestLateRerunHandler.java
index e02b495..8137f60 100644
--- a/rerun/src/test/java/org/apache/falcon/rerun/handler/TestLateRerunHandler.java
+++ b/rerun/src/test/java/org/apache/falcon/rerun/handler/TestLateRerunHandler.java
@@ -18,12 +18,12 @@
 
 package org.apache.falcon.rerun.handler;
 
-import junit.framework.Assert;
 import org.apache.falcon.FalconException;
 import org.apache.falcon.entity.EntityUtil;
 import org.apache.falcon.entity.v0.Frequency;
 import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.feed.LateArrival;
+import org.testng.Assert;
 import org.testng.annotations.Test;
 
 import java.util.Date;

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/test/java/org/apache/falcon/rerun/queue/ActiveMQTest.java
----------------------------------------------------------------------
diff --git a/rerun/src/test/java/org/apache/falcon/rerun/queue/ActiveMQTest.java b/rerun/src/test/java/org/apache/falcon/rerun/queue/ActiveMQTest.java
index 01d0415..6b6b834 100644
--- a/rerun/src/test/java/org/apache/falcon/rerun/queue/ActiveMQTest.java
+++ b/rerun/src/test/java/org/apache/falcon/rerun/queue/ActiveMQTest.java
@@ -51,7 +51,7 @@ public class ActiveMQTest {
 
         RerunEvent event = new LaterunEvent("clusterName", "wfId",
                 System.currentTimeMillis(), 60 * 1000, "entityType",
-                "entityName", "instance", 0);
+                "entityName", "instance", 0, "falcon");
 
         try {
             activeMQueue.offer(event);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/rerun/src/test/java/org/apache/falcon/rerun/queue/InMemoryQueueTest.java
----------------------------------------------------------------------
diff --git a/rerun/src/test/java/org/apache/falcon/rerun/queue/InMemoryQueueTest.java b/rerun/src/test/java/org/apache/falcon/rerun/queue/InMemoryQueueTest.java
index 6aafaa5..8508d37 100644
--- a/rerun/src/test/java/org/apache/falcon/rerun/queue/InMemoryQueueTest.java
+++ b/rerun/src/test/java/org/apache/falcon/rerun/queue/InMemoryQueueTest.java
@@ -45,7 +45,7 @@ public class InMemoryQueueTest {
             long time = System.currentTimeMillis();
             int delay = ((5 - index) / 2) * 50;
             MyEvent event = new MyEvent("someCluster", Integer.toString(index),
-                    time, delay, "someType", "someName", "someInstance", 0);
+                    time, delay, "someType", "someName", "someInstance", 0, "falcon");
             queue.offer(event);
             boolean inserted = false;
             for (int posn = 0; posn < events.size(); posn++) {
@@ -73,9 +73,9 @@ public class InMemoryQueueTest {
         //SUSPEND CHECKSTYLE CHECK VisibilityModifierCheck
         public MyEvent(String clusterName, String wfId,
                        long msgInsertTime, long delay, String entityType,
-                       String entityName, String instance, int runId) {
+                       String entityName, String instance, int runId, String workflowUser) {
             super(clusterName, wfId, msgInsertTime, delay,
-                    entityType, entityName, instance, runId);
+                    entityType, entityName, instance, runId, workflowUser);
         }
         //RESUME CHECKSTYLE CHECK VisibilityModifierCheck
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/src/bin/falcon
----------------------------------------------------------------------
diff --git a/src/bin/falcon b/src/bin/falcon
index d196a5d..113c9a7 100755
--- a/src/bin/falcon
+++ b/src/bin/falcon
@@ -30,4 +30,4 @@ BASEDIR=`dirname ${PRG}`
 BASEDIR=`cd ${BASEDIR}/..;pwd`
 . ${BASEDIR}/bin/falcon-config.sh 'client'
 
-${JAVA_BIN} -cp ${FALCONCPPATH} org.apache.falcon.cli.FalconCLI "${@}"
+${JAVA_BIN} -cp ${FALCONCPPATH} -Dfalcon.log.dir=$HOME -Dfalcon.app.type=client org.apache.falcon.cli.FalconCLI "${@}"

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/src/conf/log4j.xml
----------------------------------------------------------------------
diff --git a/src/conf/log4j.xml b/src/conf/log4j.xml
index 58ebd80..90abe26 100644
--- a/src/conf/log4j.xml
+++ b/src/conf/log4j.xml
@@ -51,6 +51,15 @@
         </layout>
     </appender>
 
+    <appender name="SECURITY" class="org.apache.log4j.DailyRollingFileAppender">
+        <param name="File" value="${falcon.log.dir}/${falcon.app.type}.security.audit.log"/>
+        <param name="Append" value="true"/>
+        <param name="Threshold" value="debug"/>
+        <layout class="org.apache.log4j.PatternLayout">
+            <param name="ConversionPattern" value="%d %x %m%n"/>
+        </layout>
+    </appender>
+
     <logger name="org.apache.falcon" additivity="false">
         <level value="debug"/>
         <appender-ref ref="FILE"/>
@@ -66,6 +75,26 @@
         <appender-ref ref="METRIC"/>
     </logger>
 
+    <logger name="org.apache.hadoop.security" additivity="false">
+        <level value="info"/>
+        <appender-ref ref="SECURITY"/>
+    </logger>
+
+    <logger name="org.apache.hadoop" additivity="false">
+        <level value="info"/>
+        <appender-ref ref="FILE"/>
+    </logger>
+
+    <logger name="org.apache.oozie" additivity="false">
+        <level value="info"/>
+        <appender-ref ref="FILE"/>
+    </logger>
+
+    <logger name="org.apache.hadoop.hive" additivity="false">
+        <level value="info"/>
+        <appender-ref ref="FILE"/>
+    </logger>
+
     <root>
         <priority value="info"/>
         <appender-ref ref="FILE"/>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/src/conf/startup.properties
----------------------------------------------------------------------
diff --git a/src/conf/startup.properties b/src/conf/startup.properties
index 79cd211..0d0ab41 100644
--- a/src/conf/startup.properties
+++ b/src/conf/startup.properties
@@ -32,8 +32,9 @@
 *.ProcessInstanceManager.impl=org.apache.falcon.resource.InstanceManager
 *.catalog.service.impl=org.apache.falcon.catalog.HiveCatalogService
 
-*.application.services=org.apache.falcon.entity.store.ConfigurationStore,\
+*.application.services=org.apache.falcon.security.AuthenticationInitializationService,\
                         org.apache.falcon.service.ProcessSubscriberService,\
+                        org.apache.falcon.entity.store.ConfigurationStore,\
                         org.apache.falcon.rerun.service.RetryService,\
 						org.apache.falcon.rerun.service.LateRunService,\
 						org.apache.falcon.service.SLAMonitoringService,\
@@ -51,18 +52,79 @@ prism.configstore.listeners=org.apache.falcon.entity.v0.EntityGraph,\
 
 ######### Implementation classes #########
 
+
+######### System startup parameters #########
+
+# Location to store user entity configurations
 *.config.store.uri=file://${falcon.home}/store
+
+# Location of libraries that are shipped to Hadoop
 *.system.lib.location=${falcon.home}/server/webapp/falcon/WEB-INF/lib
 prism.system.lib.location=${falcon.home}/server/webapp/prism/WEB-INF/lib
-*.broker.url=tcp://localhost:61616
+
 *.retry.recorder.path=${falcon.log.dir}/retry
 
 *.falcon.cleanup.service.frequency=days(1)
 
-#default time-to-live for a JMS message 3 days (time in minutes)
+
+######### Properties for configuring JMS provider - activemq #########
+# Default ActiveMQ broker URL
+*.broker.url=tcp://localhost:61616
+
+# Default time-to-live for a JMS message: 3 days (time in minutes)
 *.broker.ttlInMins=4320
 *.entity.topic=FALCON.ENTITY.TOPIC
 *.max.retry.failure.count=1
 
 ######### Properties for configuring iMon client and metric #########
 *.internal.queue.size=1000
+
+
+######### Authentication Properties #########
+
+# Authentication type must be specified: simple|kerberos
+*.falcon.authentication.type=simple
+
+##### Service Configuration
+
+# Indicates the Kerberos principal to be used in Falcon Service.
+*.falcon.service.authentication.kerberos.principal=
+
+# Location of the keytab file with the credentials for the Service principal.
+*.falcon.service.authentication.kerberos.keytab=
+
+# NameNode principal Falcon uses to talk to the config store
+*.dfs.namenode.kerberos.principal=
+
+##### SPNEGO Configuration
+
+# Authentication type must be specified: simple|kerberos|<class>
+# org.apache.falcon.security.RemoteUserInHeaderBasedAuthenticationHandler can be used for backwards compatibility
+*.falcon.http.authentication.type=simple
+
+# Indicates how long (in seconds) an authentication token is valid before it has to be renewed.
+*.falcon.http.authentication.token.validity=36000
+
+# The signature secret for signing the authentication tokens.
+*.falcon.http.authentication.signature.secret=falcon
+
+# The domain to use for the HTTP cookie that stores the authentication token.
+*.falcon.http.authentication.cookie.domain=
+
+# Indicates if anonymous requests are allowed when using 'simple' authentication.
+*.falcon.http.authentication.simple.anonymous.allowed=true
+
+# Indicates the Kerberos principal to be used for HTTP endpoint.
+# The principal MUST start with 'HTTP/' as per the Kerberos HTTP SPNEGO specification.
+*.falcon.http.authentication.kerberos.principal=
+
+# Location of the keytab file with the credentials for the HTTP principal.
+*.falcon.http.authentication.kerberos.keytab=
+
+# The kerberos name rules are used to resolve kerberos principal names; refer to Hadoop's KerberosName for more details.
+*.falcon.http.authentication.kerberos.name.rules=DEFAULT
+
+# Comma-separated list of blacklisted users
+*.falcon.http.authentication.blacklisted.users=
+
+######### Authentication Properties #########
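For reference, a minimal sketch of these properties with Kerberos turned on; the principal
and keytab values below are illustrative placeholders, not defaults shipped by this patch:

    *.falcon.authentication.type=kerberos
    *.falcon.service.authentication.kerberos.principal=falcon/_HOST@EXAMPLE.COM
    *.falcon.service.authentication.kerberos.keytab=/etc/security/keytabs/falcon.service.keytab
    *.dfs.namenode.kerberos.principal=nn/_HOST@EXAMPLE.COM
    *.falcon.http.authentication.type=kerberos
    *.falcon.http.authentication.kerberos.principal=HTTP/_HOST@EXAMPLE.COM
    *.falcon.http.authentication.kerberos.keytab=/etc/security/keytabs/spnego.service.keytab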

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
----------------------------------------------------------------------
diff --git a/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java b/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
index 2b55407..af29f93 100644
--- a/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
+++ b/test-util/src/main/java/org/apache/falcon/cluster/util/EmbeddedCluster.java
@@ -43,7 +43,6 @@ public class EmbeddedCluster {
     protected EmbeddedCluster() {
     }
 
-    //private MiniDFSCluster dfsCluster;
     protected Configuration conf = newConfiguration();
     protected Cluster clusterEntity;
 
@@ -81,6 +80,7 @@ public class EmbeddedCluster {
         cluster.conf.set("jail.base", System.getProperty("hadoop.tmp.dir",
                 cluster.conf.get("hadoop.tmp.dir", "/tmp")));
         cluster.conf.set("fs.default.name", "jail://" + (global ? "global" : name) + ":00");
+
         String hdfsUrl = cluster.conf.get("fs.default.name");
         LOG.info("Cluster Namenode = " + hdfsUrl);
         cluster.buildClusterObject(name);
@@ -133,17 +133,9 @@ public class EmbeddedCluster {
     }
 
     public void shutdown() {
-        //dfsCluster.shutdown();
     }
 
     public Cluster getCluster() {
         return clusterEntity;
     }
-
-    public Cluster clone(String cloneName) {
-        EmbeddedCluster clone = new EmbeddedCluster();
-        clone.conf = this.conf;
-        clone.buildClusterObject(cloneName);
-        return clone.clusterEntity;
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/pom.xml
----------------------------------------------------------------------
diff --git a/webapp/pom.xml b/webapp/pom.xml
index 8c37409..0c9e601 100644
--- a/webapp/pom.xml
+++ b/webapp/pom.xml
@@ -207,6 +207,11 @@
         </dependency>
 
         <dependency>
+            <groupId>org.apache.hcatalog</groupId>
+            <artifactId>webhcat-java-client</artifactId>
+        </dependency>
+
+        <dependency>
             <groupId>org.apache.falcon</groupId>
             <artifactId>falcon-hadoop-webapp</artifactId>
             <type>war</type>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/conf/oozie/conf/oozie-site.xml
----------------------------------------------------------------------
diff --git a/webapp/src/conf/oozie/conf/oozie-site.xml b/webapp/src/conf/oozie/conf/oozie-site.xml
index e5f404a..5f644a2 100644
--- a/webapp/src/conf/oozie/conf/oozie-site.xml
+++ b/webapp/src/conf/oozie/conf/oozie-site.xml
@@ -304,7 +304,7 @@
 
     <property>
         <name>oozie.authentication.simple.anonymous.allowed</name>
-        <value>true</value>
+        <value>false</value>
         <description>
             Indicates if anonymous requests are allowed.
             This setting is meaningful only when using 'simple' authentication.
@@ -485,51 +485,11 @@
     <!-- Proxyuser Configuration -->
     <property>
         <name>oozie.service.ProxyUserService.proxyuser.${user.name}.hosts</name>
-        <value>localhost</value>
-        <description></description>
-    </property>
-    <property>
-        <name>oozie.service.ProxyUserService.proxyuser.${user.name}.groups</name>
-        <value>users</value>
-        <description></description>
-    </property>
-
-    <!--
-
-    <property>
-        <name>oozie.service.ProxyUserService.proxyuser.#USER#.hosts</name>
         <value>*</value>
-        <description>
-            List of hosts the '#USER#' user is allowed to perform 'doAs'
-            operations.
-
-            The '#USER#' must be replaced with the username o the user who is
-            allowed to perform 'doAs' operations.
-
-            The value can be the '*' wildcard or a list of hostnames.
-
-            For multiple users copy this property and replace the user name
-            in the property name.
-        </description>
     </property>
-
     <property>
-        <name>oozie.service.ProxyUserService.proxyuser.#USER#.groups</name>
+        <name>oozie.service.ProxyUserService.proxyuser.${user.name}.groups</name>
         <value>*</value>
-        <description>
-            List of groups the '#USER#' user is allowed to impersonate users
-            from to perform 'doAs' operations.
-
-            The '#USER#' must be replaced with the username o the user who is
-            allowed to perform 'doAs' operations.
-
-            The value can be the '*' wildcard or a list of groups.
-
-            For multiple users copy this property and replace the user name
-            in the property name.
-        </description>
     </property>
 
-    -->
-
 </configuration>
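The hunk above deliberately widens the Oozie proxyuser whitelist to '*' for this embedded
test setup; a hardened deployment would scope the same properties instead, e.g. (host and
group values below are illustrative):

    <property>
        <name>oozie.service.ProxyUserService.proxyuser.falcon.hosts</name>
        <value>falcon-host.example.com</value>
    </property>
    <property>
        <name>oozie.service.ProxyUserService.proxyuser.falcon.groups</name>
        <value>falcon-users</value>
    </property>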

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/main/resources/log4j.xml
----------------------------------------------------------------------
diff --git a/webapp/src/main/resources/log4j.xml b/webapp/src/main/resources/log4j.xml
index d133b8e..5ba6f16 100644
--- a/webapp/src/main/resources/log4j.xml
+++ b/webapp/src/main/resources/log4j.xml
@@ -54,6 +54,15 @@
         </layout>
     </appender>
 
+    <appender name="SECURITY" class="org.apache.log4j.DailyRollingFileAppender">
+        <param name="File" value="${user.dir}/target/logs/security.audit.log"/>
+        <param name="Append" value="true"/>
+        <param name="Threshold" value="debug"/>
+        <layout class="org.apache.log4j.PatternLayout">
+            <param name="ConversionPattern" value="%d %x %m%n"/>
+        </layout>
+    </appender>
+
     <logger name="org.apache.falcon" additivity="false">
         <level value="debug"/>
         <appender-ref ref="FILE"/>
@@ -69,6 +78,26 @@
         <appender-ref ref="METRIC"/>
     </logger>
 
+    <logger name="org.apache.hadoop.security" additivity="false">
+        <level value="info"/>
+        <appender-ref ref="SECURITY"/>
+    </logger>
+
+    <logger name="org.apache.hadoop" additivity="false">
+        <level value="info"/>
+        <appender-ref ref="FILE"/>
+    </logger>
+
+    <logger name="org.apache.oozie" additivity="false">
+        <level value="info"/>
+        <appender-ref ref="FILE"/>
+    </logger>
+
+    <logger name="org.apache.hadoop.hive" additivity="false">
+        <level value="info"/>
+        <appender-ref ref="FILE"/>
+    </logger>
+
     <root>
         <priority value="info"/>
         <appender-ref ref="FILE"/>
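With the configuration above, anything logged under the org.apache.hadoop.security logger
hierarchy is routed to security.audit.log through the SECURITY appender (additivity is off,
so it stays out of the main FILE log). A minimal log4j 1.x sketch, with a logger name chosen
to fall under that hierarchy:

    import org.apache.log4j.Logger;

    public class AuditProbe {
        // inherits the "org.apache.hadoop.security" logger config -> SECURITY appender
        private static final Logger AUDIT = Logger.getLogger("org.apache.hadoop.security.audit");

        public static void main(String[] args) {
            AUDIT.info("Authentication succeeded for user=falcon"); // lands in security.audit.log
        }
    }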

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java b/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java
index 9909140..fd004a1 100644
--- a/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java
+++ b/webapp/src/test/java/org/apache/falcon/catalog/HiveCatalogServiceIT.java
@@ -19,6 +19,8 @@
 package org.apache.falcon.catalog;
 
 import org.apache.falcon.FalconException;
+import org.apache.falcon.resource.TestContext;
+import org.apache.falcon.security.CurrentUser;
 import org.apache.hcatalog.api.HCatAddPartitionDesc;
 import org.apache.hcatalog.api.HCatClient;
 import org.apache.hcatalog.api.HCatCreateDBDesc;
@@ -55,6 +57,9 @@ public class HiveCatalogServiceIT {
 
     @BeforeClass
     public void setUp() throws Exception {
+        // setup a logged in user
+        CurrentUser.authenticate(TestContext.REMOTE_USER);
+
         hiveCatalogService = new HiveCatalogService();
         client = HiveCatalogService.get(METASTORE_URL);
 
@@ -168,22 +173,23 @@ public class HiveCatalogServiceIT {
 
     @Test
     public void testIsAlive() throws Exception {
-        Assert.assertTrue(hiveCatalogService.isAlive(METASTORE_URL));
+        Assert.assertTrue(hiveCatalogService.isAlive(METASTORE_URL, "metaStorePrincipal"));
     }
 
-    @Test (expectedExceptions = FalconException.class)
+    @Test (expectedExceptions = Exception.class)
     public void testIsAliveNegative() throws Exception {
-        hiveCatalogService.isAlive("thrift://localhost:9999");
+        hiveCatalogService.isAlive("thrift://localhost:9999", "metaStorePrincipal");
     }
 
     @Test (expectedExceptions = FalconException.class)
     public void testTableExistsNegative() throws Exception {
-        hiveCatalogService.tableExists(METASTORE_URL, DATABASE_NAME, "blah");
+        hiveCatalogService.tableExists(METASTORE_URL, DATABASE_NAME, "blah", "metaStorePrincipal");
     }
 
     @Test
     public void testTableExists() throws Exception {
-        Assert.assertTrue(hiveCatalogService.tableExists(METASTORE_URL, DATABASE_NAME, TABLE_NAME));
+        Assert.assertTrue(hiveCatalogService.tableExists(
+                METASTORE_URL, DATABASE_NAME, TABLE_NAME, "metaStorePrincipal"));
     }
 
     @Test
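The catalog API now threads a metastore principal through every call, matching the new
signatures exercised above. A sketch of the shape of those calls; the URL, database, table,
and principal values are placeholders:

    HiveCatalogService catalog = new HiveCatalogService();
    boolean alive = catalog.isAlive("thrift://localhost:9083", "hive/_HOST@EXAMPLE.COM");
    boolean present = catalog.tableExists(
            "thrift://localhost:9083", "falcon_db", "customer_raw", "hive/_HOST@EXAMPLE.COM");

Both calls can fail with an exception, which is what the negative tests assert.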

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java b/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
index 72369c0..5cd7beb 100644
--- a/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
+++ b/webapp/src/test/java/org/apache/falcon/cli/FalconCLIIT.java
@@ -19,6 +19,7 @@
 package org.apache.falcon.cli;
 
 import org.apache.falcon.resource.TestContext;
+import org.apache.falcon.util.OozieTestUtils;
 import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
@@ -39,9 +40,6 @@ import java.util.Map;
 public class FalconCLIIT {
 
     private InMemoryWriter stream = new InMemoryWriter(System.out);
-    // private static final String BROKER_URL =
-    // "tcp://localhost:61616?daemon=true";
-    private static final boolean TEST_ENABLED = true;
 
     @BeforeClass
     public void prepare() throws Exception {
@@ -56,7 +54,7 @@ public class FalconCLIIT {
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
 
-        filePath = context.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.CLUSTER_TEMPLATE, overlay);
         Assert.assertEquals(
                 0,
                 executeWithURL("entity -submit -type cluster -file " + filePath));
@@ -64,7 +62,7 @@ public class FalconCLIIT {
         Assert.assertEquals(stream.buffer.toString().trim(),
                 "default/Submit successful (cluster) " + context.getClusterName());
 
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submit -type feed -file " + filePath));
         Assert.assertEquals(
@@ -72,7 +70,7 @@ public class FalconCLIIT {
                 "default/Submit successful (feed) "
                         + overlay.get("inputFeedName"));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submit -type feed -file " + filePath));
         Assert.assertEquals(
@@ -80,7 +78,7 @@ public class FalconCLIIT {
                 "default/Submit successful (feed) "
                         + overlay.get("outputFeedName"));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
         Assert.assertEquals(
                 0,
                 executeWithURL("entity -submit -type process -file " + filePath));
@@ -102,29 +100,29 @@ public class FalconCLIIT {
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
 
-        filePath = context.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay);
+        filePath = TestContext.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay);
         Assert.assertEquals(-1,
                 executeWithURL("entity -submitAndSchedule -type cluster -file "
                         + filePath));
         context.setCluster(overlay.get("cluster"));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submitAndSchedule -type feed -file "
                         + filePath));
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submitAndSchedule -type feed -file "
                         + filePath));
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submit -type feed -file " + filePath));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submit -type feed -file " + filePath));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submitAndSchedule -type process -file "
                         + filePath));
@@ -136,7 +134,7 @@ public class FalconCLIIT {
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
 
-        filePath = context.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay);
+        filePath = TestContext.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -validate -type cluster -file "
                         + filePath));
@@ -146,19 +144,19 @@ public class FalconCLIIT {
                 executeWithURL("entity -submit -type cluster -file " + filePath));
         context.setCluster(overlay.get("cluster"));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -validate -type feed -file " + filePath));
         Assert.assertEquals(0,
                 executeWithURL("entity -submit -type feed -file " + filePath));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -validate -type feed -file " + filePath));
         Assert.assertEquals(0,
                 executeWithURL("entity -submit -type feed -file " + filePath));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -validate -type process -file "
                         + filePath));
@@ -239,7 +237,7 @@ public class FalconCLIIT {
                 executeWithURL("entity -schedule -type process -name "
                         + overlay.get("processName")));
 
-        context.waitForProcessWFtoStart();
+        OozieTestUtils.waitForProcessWFtoStart(context);
 
         Assert.assertEquals(
                 0,
@@ -329,7 +327,7 @@ public class FalconCLIIT {
 
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
-        context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
+        TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Assert.assertEquals(-1,
                 executeWithURL("entity -submit -type feed -name " + "name"));
 
@@ -349,7 +347,7 @@ public class FalconCLIIT {
         Assert.assertEquals(0,
                 executeWithURL("entity -schedule -type feed -name "
                         + overlay.get("outputFeedName")));
-        context.waitForProcessWFtoStart();
+        OozieTestUtils.waitForProcessWFtoStart(context);
 
         Assert.assertEquals(0,
                 executeWithURL("instance -status -type feed -name "
@@ -378,7 +376,7 @@ public class FalconCLIIT {
         Assert.assertEquals(0,
                 executeWithURL("entity -schedule -type feed -name "
                         + overlay.get("outputFeedName")));
-        context.waitForProcessWFtoStart();
+        OozieTestUtils.waitForProcessWFtoStart(context);
 
         Assert.assertEquals(0,
                 executeWithURL("instance -status -type feed -name "
@@ -428,7 +426,7 @@ public class FalconCLIIT {
                 executeWithURL("entity -schedule -type process -name "
                         + overlay.get("processName")));
 
-        context.waitForProcessWFtoStart();
+        OozieTestUtils.waitForProcessWFtoStart(context);
         Assert.assertEquals(
                 0,
                 executeWithURL("instance -kill -type process -name "
@@ -452,7 +450,7 @@ public class FalconCLIIT {
                 executeWithURL("entity -schedule -type process -name "
                         + overlay.get("processName")));
 
-        context.waitForProcessWFtoStart();
+        OozieTestUtils.waitForProcessWFtoStart(context);
         Assert.assertEquals(
                 0,
                 executeWithURL("instance -kill -type process -name "
@@ -481,7 +479,6 @@ public class FalconCLIIT {
         Assert.assertEquals(-1,
                 executeWithURL("instance -kill -type process -name "
                         + " -start 2010-01-01T01:00Z  -end 2010-01-01T03:00Z"));
-
     }
 
     public void testFalconURL() throws Exception {
@@ -495,8 +492,6 @@ public class FalconCLIIT {
                         + "processName -url http://unknownhost:1234/"
                         + " -start 2010-01-01T01:00Z  -end 2010-01-01T03:00Z")
                         .split("\\s")));
-
-
     }
 
     public void testClientProperties() throws Exception {
@@ -504,8 +499,7 @@ public class FalconCLIIT {
         Map<String, String> overlay = context.getUniqueOverlay();
         submitTestFiles(context, overlay);
 
-        Assert.assertEquals(
-                0,
+        Assert.assertEquals(0,
                 new FalconCLI().run(("entity -schedule -type feed -name "
                         + overlay.get("outputFeedName") + " -url "
                         + TestContext.BASE_URL).split("\\s+")));
@@ -514,15 +508,21 @@ public class FalconCLIIT {
                 new FalconCLI().run(("entity -schedule -type process -name "
                         + overlay.get("processName")+ " -url "
                         + TestContext.BASE_URL).split("\\s+")));
-
     }
 
     public void testGetVersion() throws Exception {
         Assert.assertEquals(0,
-                new FalconCLI().run("admin -version".split("\\s")));
+                new FalconCLI().run(("admin -version -url " + TestContext.BASE_URL).split("\\s")));
+    }
+
+    public void testGetStatus() throws Exception {
+        Assert.assertEquals(0,
+                new FalconCLI().run(("admin -status -url " + TestContext.BASE_URL).split("\\s")));
+    }
 
+    public void testGetThreadStackDump() throws Exception {
         Assert.assertEquals(0,
-                new FalconCLI().run("admin -stack".split("\\s")));
+                new FalconCLI().run(("admin -stack -url " + TestContext.BASE_URL).split("\\s")));
     }
 
     public void testInstanceGetLogs() throws Exception {
@@ -538,7 +538,6 @@ public class FalconCLIIT {
                 executeWithURL("instance -logs -type process -name "
                         + overlay.get("processName")
                         + " -start " + START_INSTANCE + " -end " + START_INSTANCE));
-
     }
 
     private int executeWithURL(String command) throws Exception {
@@ -560,22 +559,22 @@ public class FalconCLIIT {
 
     private void submitTestFiles(TestContext context, Map<String, String> overlay) throws Exception {
 
-        String filePath = context.overlayParametersOverTemplate(context.getClusterFileTemplate(),
+        String filePath = TestContext.overlayParametersOverTemplate(context.getClusterFileTemplate(),
                 overlay);
         Assert.assertEquals(
                 0,
                 executeWithURL("entity -submit -type cluster -file " + filePath));
         context.setCluster(overlay.get("cluster"));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submit -type feed -file " + filePath));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submit -type feed -file " + filePath));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
         Assert.assertEquals(
                 0,
                 executeWithURL("entity -submit -type process -file " + filePath));
@@ -596,6 +595,7 @@ public class FalconCLIIT {
             super.println(x);
         }
 
+        @SuppressWarnings("UnusedDeclaration")
         public String getBuffer() {
             return buffer.toString();
         }
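Note that the admin subcommands no longer assume a default endpoint; every invocation now
carries an explicit -url. A sketch of a direct invocation (the endpoint value is illustrative):

    // assumes a Falcon server listening at the given URL
    int exit = new FalconCLI().run("admin -status -url http://localhost:15000/".split("\\s+"));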

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java b/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java
index 55f240f..d503735 100644
--- a/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java
+++ b/webapp/src/test/java/org/apache/falcon/cli/FalconCLISmokeIT.java
@@ -19,6 +19,7 @@
 package org.apache.falcon.cli;
 
 import org.apache.falcon.resource.TestContext;
+import org.apache.falcon.util.OozieTestUtils;
 import org.testng.Assert;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
@@ -44,39 +45,39 @@ public class FalconCLISmokeIT {
         TestContext context = new TestContext();
         Map<String, String> overlay = context.getUniqueOverlay();
 
-        filePath = context.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay);
+        filePath = TestContext.overlayParametersOverTemplate(context.getClusterFileTemplate(), overlay);
         Assert.assertEquals(-1,
                 executeWithURL("entity -submitAndSchedule -type cluster -file "
                         + filePath));
         context.setCluster(overlay.get("cluster"));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submitAndSchedule -type feed -file "
                         + filePath));
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submitAndSchedule -type feed -file "
                         + filePath));
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE1, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submit -type feed -file " + filePath));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.FEED_TEMPLATE2, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submit -type feed -file " + filePath));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -validate -type process -file "
                         + filePath));
 
-        filePath = context.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
+        filePath = TestContext.overlayParametersOverTemplate(TestContext.PROCESS_TEMPLATE, overlay);
         Assert.assertEquals(0,
                 executeWithURL("entity -submitAndSchedule -type process -file "
                         + filePath));
 
-        context.waitForProcessWFtoStart();
+        OozieTestUtils.waitForProcessWFtoStart(context);
 
         Assert.assertEquals(0,
                 executeWithURL("entity -definition -type cluster -name "
@@ -90,7 +91,6 @@ public class FalconCLISmokeIT {
         Assert.assertEquals(0,
                 executeWithURL("instance -running -type process -name "
                         + overlay.get("processName")));
-
     }
 
     private int executeWithURL(String command) throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/late/LateDataHandlerIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/late/LateDataHandlerIT.java b/webapp/src/test/java/org/apache/falcon/late/LateDataHandlerIT.java
index 6cfa4e6..ab60307 100644
--- a/webapp/src/test/java/org/apache/falcon/late/LateDataHandlerIT.java
+++ b/webapp/src/test/java/org/apache/falcon/late/LateDataHandlerIT.java
@@ -67,7 +67,7 @@ public class LateDataHandlerIT {
     public void prepare() throws Exception {
         TestContext.cleanupStore();
 
-        String filePath = context.overlayParametersOverTemplate(
+        String filePath = TestContext.overlayParametersOverTemplate(
                 TestContext.CLUSTER_TEMPLATE, context.getUniqueOverlay());
         context.setCluster(filePath);
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/lifecycle/FileSystemFeedReplicationIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/lifecycle/FileSystemFeedReplicationIT.java b/webapp/src/test/java/org/apache/falcon/lifecycle/FileSystemFeedReplicationIT.java
index 058b35c..92ff8ac 100644
--- a/webapp/src/test/java/org/apache/falcon/lifecycle/FileSystemFeedReplicationIT.java
+++ b/webapp/src/test/java/org/apache/falcon/lifecycle/FileSystemFeedReplicationIT.java
@@ -64,7 +64,7 @@ public class FileSystemFeedReplicationIT {
         TestContext.cleanupStore();
 
         Map<String, String> overlay = sourceContext.getUniqueOverlay();
-        String sourceFilePath = sourceContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
+        String sourceFilePath = TestContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
         sourceContext.setCluster(sourceFilePath);
 
         final Cluster sourceCluster = sourceContext.getCluster().getCluster();
@@ -74,21 +74,21 @@ public class FileSystemFeedReplicationIT {
         final String sourcePath = sourceStorageUrl + SOURCE_LOCATION + PARTITION_VALUE;
         FSUtils.copyResourceToHDFS("/apps/data/data.txt", "data.txt", sourcePath);
 
-        String targetFilePath = targetContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
+        String targetFilePath = TestContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
         targetContext.setCluster(targetFilePath);
 
         final Cluster targetCluster = targetContext.getCluster().getCluster();
         copyLibsToHDFS(targetCluster);
 
-        String file = targetAlphaContext.overlayParametersOverTemplate("/table/target-cluster-alpha.xml", overlay);
+        String file = TestContext.overlayParametersOverTemplate("/table/target-cluster-alpha.xml", overlay);
         targetAlphaContext.setCluster(file);
         copyLibsToHDFS(targetAlphaContext.getCluster().getCluster());
 
-        file = targetBetaContext.overlayParametersOverTemplate("/table/target-cluster-beta.xml", overlay);
+        file = TestContext.overlayParametersOverTemplate("/table/target-cluster-beta.xml", overlay);
         targetBetaContext.setCluster(file);
         copyLibsToHDFS(targetBetaContext.getCluster().getCluster());
 
-        file = targetGammaContext.overlayParametersOverTemplate("/table/target-cluster-gamma.xml", overlay);
+        file = TestContext.overlayParametersOverTemplate("/table/target-cluster-gamma.xml", overlay);
         targetGammaContext.setCluster(file);
         copyLibsToHDFS(targetGammaContext.getCluster().getCluster());
     }
@@ -123,17 +123,17 @@ public class FileSystemFeedReplicationIT {
     @Test (enabled = false)
     public void testFSReplicationSingleSourceToTarget() throws Exception {
         final Map<String, String> overlay = sourceContext.getUniqueOverlay();
-        String filePath = sourceContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
+        String filePath = TestContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
-        filePath = targetContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/bcp-cluster.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
         // verify if the partition on the source exists - precondition
         FileSystem sourceFS = FileSystem.get(ClusterHelper.getConfiguration(sourceContext.getCluster().getCluster()));
         Assert.assertTrue(sourceFS.exists(new Path(SOURCE_LOCATION + PARTITION_VALUE)));
 
-        filePath = sourceContext.overlayParametersOverTemplate("/table/customer-fs-replicating-feed.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/customer-fs-replicating-feed.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submitAndSchedule -type feed -file " + filePath));
 
         // wait until the workflow job completes
@@ -148,7 +148,7 @@ public class FileSystemFeedReplicationIT {
         Assert.assertTrue(fs.exists(new Path(TARGET_LOCATION + PARTITION_VALUE)));
 
         InstancesResult response = targetContext.getService().path("api/instance/running/feed/" + feedName)
-                .header("Remote-User", "guest")
+                .header("Cookie", targetContext.getAuthenticationToken())
                 .accept(MediaType.APPLICATION_JSON)
                 .get(InstancesResult.class);
         Assert.assertEquals(response.getStatus(), APIResult.Status.SUCCEEDED);
@@ -161,23 +161,23 @@ public class FileSystemFeedReplicationIT {
     @Test (enabled = false)
     public void testFSReplicationSingleSourceToMultipleTargets() throws Exception {
         final Map<String, String> overlay = sourceContext.getUniqueOverlay();
-        String filePath = sourceContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
+        String filePath = TestContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
-        filePath = targetContext.overlayParametersOverTemplate("/table/target-cluster-alpha.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/target-cluster-alpha.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
-        filePath = targetContext.overlayParametersOverTemplate("/table/target-cluster-beta.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/target-cluster-beta.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
-        filePath = targetContext.overlayParametersOverTemplate("/table/target-cluster-gamma.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/target-cluster-gamma.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
         // verify if the partition on the source exists - precondition
         FileSystem sourceFS = FileSystem.get(ClusterHelper.getConfiguration(sourceContext.getCluster().getCluster()));
         Assert.assertTrue(sourceFS.exists(new Path(SOURCE_LOCATION + PARTITION_VALUE)));
 
-        filePath = sourceContext.overlayParametersOverTemplate("/table/multiple-targets-replicating-feed.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/multiple-targets-replicating-feed.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submitAndSchedule -type feed -file " + filePath));
 
         // wait until the workflow job completes
@@ -201,7 +201,7 @@ public class FileSystemFeedReplicationIT {
                 gamma.exists(new Path("/falcon/test/target-cluster-gamma/customer_gamma/" + PARTITION_VALUE)));
 
         InstancesResult response = targetContext.getService().path("api/instance/running/feed/" + feedName)
-                .header("Remote-User", "guest")
+                .header("Cookie", targetContext.getAuthenticationToken())
                 .accept(MediaType.APPLICATION_JSON)
                 .get(InstancesResult.class);
         Assert.assertEquals(response.getStatus(), APIResult.Status.SUCCEEDED);
@@ -226,19 +226,19 @@ public class FileSystemFeedReplicationIT {
         Assert.assertTrue(sourceFS.exists(sourcePath));
 
         final Map<String, String> overlay = sourceContext.getUniqueOverlay();
-        String filePath = sourceContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
+        String filePath = TestContext.overlayParametersOverTemplate("/table/primary-cluster.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
-        filePath = targetContext.overlayParametersOverTemplate("/table/target-cluster-alpha.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/target-cluster-alpha.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
-        filePath = targetContext.overlayParametersOverTemplate("/table/target-cluster-beta.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/target-cluster-beta.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submit -type cluster -file " + filePath));
 
         // verify if the partition on the source exists - precondition
         Assert.assertTrue(sourceFS.exists(sourcePath));
 
-        filePath = sourceContext.overlayParametersOverTemplate("/table/complex-replicating-feed.xml", overlay);
+        filePath = TestContext.overlayParametersOverTemplate("/table/complex-replicating-feed.xml", overlay);
         Assert.assertEquals(0, TestContext.executeWithURL("entity -submitAndSchedule -type feed -file " + filePath));
 
         // wait until the workflow job completes
@@ -257,7 +257,7 @@ public class FileSystemFeedReplicationIT {
         Assert.assertTrue(beta.exists(new Path("/localDC/rc/billing/ua2/" + partitionValue)));
 
         InstancesResult response = targetContext.getService().path("api/instance/running/feed/" + feedName)
-                .header("Remote-User", "guest")
+                .header("Cookie", targetContext.getAuthenticationToken())
                 .accept(MediaType.APPLICATION_JSON)
                 .get(InstancesResult.class);
         Assert.assertEquals(response.getStatus(), APIResult.Status.SUCCEEDED);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java b/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java
index 37226e2..770780e 100644
--- a/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java
+++ b/webapp/src/test/java/org/apache/falcon/lifecycle/TableStorageFeedEvictorIT.java
@@ -356,6 +356,7 @@ public class TableStorageFeedEvictorIT {
             super.println(x);
         }
 
+        @SuppressWarnings("UnusedDeclaration")
         public String getBuffer() {
             return buffer.toString();
         }


[4/5] FALCON-11 Add support for security in Falcon. Contributed by Venkatesh Seetharam

Posted by ve...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/security/SecurityConstants.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/security/SecurityConstants.java b/common/src/main/java/org/apache/falcon/security/SecurityConstants.java
deleted file mode 100644
index 8f7ba4a..0000000
--- a/common/src/main/java/org/apache/falcon/security/SecurityConstants.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.falcon.security;
-
-import com.sun.security.auth.UnixPrincipal;
-
-import java.security.Principal;
-
-/**
- * Constants for the security module.
- */
-public final class SecurityConstants {
-
-    private SecurityConstants() {}
-
-    public static final String OS_LOGIN_MODULE_NAME =
-            "com.sun.security.auth.module.UnixLoginModule";
-    public static final Class<? extends Principal> OS_PRINCIPAL_CLASS =
-            UnixPrincipal.class;
-
-    public static final String FALCON_LOGIN = "FALCON_DEFAULT_LOGIN";
-}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/security/SecurityUtil.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/security/SecurityUtil.java b/common/src/main/java/org/apache/falcon/security/SecurityUtil.java
new file mode 100644
index 0000000..f78043f
--- /dev/null
+++ b/common/src/main/java/org/apache/falcon/security/SecurityUtil.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.security;
+
+import org.apache.falcon.util.StartupProperties;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * Security Util - a collection of security-related helper methods.
+ * Also doles out proxied UserGroupInformation instances and caches them per user.
+ */
+public final class SecurityUtil {
+
+    /**
+     * Prefix common to all authentication configuration properties.
+     */
+    private static final String CONFIG_PREFIX = "falcon.authentication.";
+
+    /**
+     * Constant for the configuration property that indicates the authentication type.
+     */
+    public static final String AUTHENTICATION_TYPE = CONFIG_PREFIX + "type";
+
+    /**
+     * Constant for the configuration property that indicates the Name node principal.
+     */
+    public static final String NN_PRINCIPAL = "dfs.namenode.kerberos.principal";
+
+    /**
+     * Constant for the configuration property that indicates the Hive Metastore principal.
+     * This is used to talk to Hive Meta Store during parsing and validations only.
+     */
+    public static final String HIVE_METASTORE_PRINCIPAL = "hive.metastore.kerberos.principal";
+
+
+    private static ConcurrentMap<String, UserGroupInformation> userUgiMap =
+            new ConcurrentHashMap<String, UserGroupInformation>();
+
+    private SecurityUtil() {
+    }
+
+    public static String getAuthenticationType() {
+        return StartupProperties.get().getProperty(
+                AUTHENTICATION_TYPE, PseudoAuthenticationHandler.TYPE);
+    }
+
+    public static boolean isSecurityEnabled() {
+        String authenticationType = StartupProperties.get().getProperty(
+                AUTHENTICATION_TYPE, PseudoAuthenticationHandler.TYPE);
+
+        final boolean useKerberos;
+        if (authenticationType == null || PseudoAuthenticationHandler.TYPE.equals(authenticationType)) {
+            useKerberos = false;
+        } else if (KerberosAuthenticationHandler.TYPE.equals(authenticationType)) {
+            useKerberos = true;
+        } else {
+            throw new IllegalArgumentException("Invalid attribute value for "
+                    + AUTHENTICATION_TYPE + " of " + authenticationType);
+        }
+
+        return useKerberos;
+    }
+
+    public static UserGroupInformation getProxyUser(String proxyUser) throws IOException {
+        UserGroupInformation proxyUgi = userUgiMap.get(proxyUser);
+        if (proxyUgi == null) {
+            // benign race: if another thread inserted first, this newly created UGI is discarded
+            proxyUgi = UserGroupInformation.createProxyUser(proxyUser, UserGroupInformation.getLoginUser());
+            userUgiMap.putIfAbsent(proxyUser, proxyUgi);
+        }
+
+        return proxyUgi;
+    }
+
+    public static String getLocalHostName() throws UnknownHostException {
+        return InetAddress.getLocalHost().getCanonicalHostName();
+    }
+}
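A minimal usage sketch of the helper above; the user name and path are illustrative. The UGI
returned by getProxyUser is the standard Hadoop vehicle for doAs-style impersonation:

    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.security.UserGroupInformation;

    public class ProxyUserSketch {
        public static void main(String[] args) throws Exception {
            final Configuration conf = new Configuration();
            // cached per user; created against the service's login user
            UserGroupInformation ugi = SecurityUtil.getProxyUser("alice");
            FileSystem fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
                public FileSystem run() throws Exception {
                    return FileSystem.get(conf); // executes as the proxied user
                }
            });
            System.out.println(fs.exists(new Path("/user/alice")));
        }
    }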

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/java/org/apache/falcon/update/UpdateHelper.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/falcon/update/UpdateHelper.java b/common/src/main/java/org/apache/falcon/update/UpdateHelper.java
index 7ed4394..4580bad 100644
--- a/common/src/main/java/org/apache/falcon/update/UpdateHelper.java
+++ b/common/src/main/java/org/apache/falcon/update/UpdateHelper.java
@@ -33,6 +33,7 @@ import org.apache.falcon.entity.v0.process.Cluster;
 import org.apache.falcon.entity.v0.process.Input;
 import org.apache.falcon.entity.v0.process.Inputs;
 import org.apache.falcon.entity.v0.process.Process;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -112,7 +113,8 @@ public final class UpdateHelper {
             }
 
             Path checksum = new Path(bundlePath, EntityUtil.PROCESS_CHECKSUM_FILE);
-            FileSystem fs = FileSystem.get(ClusterHelper.getConfiguration(clusterEntity));
+            Configuration conf = ClusterHelper.getConfiguration(clusterEntity);
+            FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(conf);
             if (!fs.exists(checksum)) {
                 //Update if there is no checksum file(for migration)
                 return true;
@@ -126,11 +128,7 @@ public final class UpdateHelper {
             }
 
             //Update if the user wf/lib is updated i.e., if checksums are different
-            if (!wfPaths.equals(checksums)) {
-                return true;
-            }
-
-            return false;
+            return !wfPaths.equals(checksums);
         } catch (IOException e) {
             throw new FalconException(e);
         }
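The change above is the pattern repeated throughout this patch: direct FileSystem.get(...)
calls give way to the factory's proxied handle, so HDFS access runs as the proxied user
rather than the Falcon service user. Side by side:

    // before: filesystem opened as the Falcon service user
    FileSystem fs = FileSystem.get(ClusterHelper.getConfiguration(clusterEntity));

    // after: filesystem proxied through HadoopClientFactory
    Configuration conf = ClusterHelper.getConfiguration(clusterEntity);
    FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(conf);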

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/main/resources/startup.properties
----------------------------------------------------------------------
diff --git a/common/src/main/resources/startup.properties b/common/src/main/resources/startup.properties
index 3014418..51b3364 100644
--- a/common/src/main/resources/startup.properties
+++ b/common/src/main/resources/startup.properties
@@ -28,8 +28,9 @@
 *.ProcessInstanceManager.impl=org.apache.falcon.resource.InstanceManager
 *.catalog.service.impl=org.apache.falcon.catalog.HiveCatalogService
 
-*.application.services=org.apache.falcon.entity.store.ConfigurationStore,\
+*.application.services=org.apache.falcon.security.AuthenticationInitializationService,\
                         org.apache.falcon.service.ProcessSubscriberService,\
+                        org.apache.falcon.entity.store.ConfigurationStore,\
                         org.apache.falcon.rerun.service.RetryService,\
 						org.apache.falcon.rerun.service.LateRunService,\
 						org.apache.falcon.service.SLAMonitoringService,\
@@ -43,8 +44,13 @@
 
 ######### Implementation classes #########
 
+
+######### System startup parameters #########
+
+# Location of libraries that are shipped to Hadoop
 *.system.lib.location=${FALCON_HOME}/sharedlibs
 
+# Location to store user entity configurations
 debug.config.store.uri=file://${user.dir}/target/store
 debug.config.oozie.conf.uri=${user.dir}/target/oozie
 debug.system.lib.location=${system.lib.location}
@@ -56,8 +62,12 @@ debug.libext.process.paths=${falcon.libext}
 
 *.falcon.cleanup.service.frequency=days(1)
 
+
+######### Properties for configuring JMS provider - activemq #########
+# Default ActiveMQ URL
 *.broker.url=tcp://localhost:61616
-#default time-to-live for a JMS message 3 days (time in minutes)
+
+# Default time-to-live for a JMS message is 3 days (time in minutes)
 *.broker.ttlInMins=4320
 *.entity.topic=FALCON.ENTITY.TOPIC
 *.max.retry.failure.count=1
@@ -65,3 +75,53 @@ debug.libext.process.paths=${falcon.libext}
 
 ######### Properties for configuring iMon client and metric #########
 *.internal.queue.size=1000
+
+
+######### Authentication Properties #########
+
+# Authentication type must be specified: simple|kerberos
+*.falcon.authentication.type=simple
+
+##### Service Configuration
+
+# Indicates the Kerberos principal to be used by the Falcon Service.
+*.falcon.service.authentication.kerberos.principal=
+
+# Location of the keytab file with the credentials for the Service principal.
+*.falcon.service.authentication.kerberos.keytab=
+
+# Name node principal used to talk to the config store
+*.dfs.namenode.kerberos.principal=
+
+##### SPNEGO Configuration
+
+# Authentication type must be specified: simple|kerberos|<class>
+# org.apache.falcon.security.RemoteUserInHeaderBasedAuthenticationHandler can be used for backwards compatibility
+*.falcon.http.authentication.type=simple
+
+# Indicates how long (in seconds) an authentication token is valid before it has to be renewed.
+*.falcon.http.authentication.token.validity=36000
+
+# The signature secret for signing the authentication tokens.
+*.falcon.http.authentication.signature.secret=falcon
+
+# The domain to use for the HTTP cookie that stores the authentication token.
+*.falcon.http.authentication.cookie.domain=
+
+# Indicates if anonymous requests are allowed when using 'simple' authentication.
+*.falcon.http.authentication.simple.anonymous.allowed=false
+
+# Indicates the Kerberos principal to be used for HTTP endpoint.
+# The principal MUST start with 'HTTP/' as per the Kerberos HTTP SPNEGO specification.
+*.falcon.http.authentication.kerberos.principal=
+
+# Location of the keytab file with the credentials for the HTTP principal.
+*.falcon.http.authentication.kerberos.keytab=
+
+# The kerberos name rules are used to resolve kerberos principal names; refer to Hadoop's KerberosName for more details.
+*.falcon.http.authentication.kerberos.name.rules=DEFAULT
+
+# Comma-separated list of blacklisted users
+*.falcon.http.authentication.blacklisted.users=
+
+######### Authentication Properties #########

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/test/java/org/apache/falcon/entity/AbstractTestBase.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/entity/AbstractTestBase.java b/common/src/test/java/org/apache/falcon/entity/AbstractTestBase.java
index 7668c7f..6ec5b41 100644
--- a/common/src/test/java/org/apache/falcon/entity/AbstractTestBase.java
+++ b/common/src/test/java/org/apache/falcon/entity/AbstractTestBase.java
@@ -25,10 +25,10 @@ import org.apache.falcon.entity.store.ConfigurationStore;
 import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
-import org.apache.falcon.entity.v0.cluster.Interface;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
 import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.process.Process;
+import org.apache.falcon.security.CurrentUser;
 import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -71,6 +71,8 @@ public class AbstractTestBase {
                 listeners.replace("org.apache.falcon.service.SharedLibraryHostingService", ""));
         store = ConfigurationStore.get();
         store.init();
+
+        CurrentUser.authenticate("falcon");
     }
 
     protected void cleanupStore() throws FalconException {
@@ -136,13 +138,4 @@ public class AbstractTestBase {
         marshaller.marshal(entity, stringWriter);
         return stringWriter.toString();
     }
-
-    private Interface newInterface(Interfacetype type, String endPoint,
-                                   String version) {
-        Interface iface = new Interface();
-        iface.setType(type);
-        iface.setEndpoint(endPoint);
-        iface.setVersion(version);
-        return iface;
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/test/java/org/apache/falcon/entity/FileSystemStorageTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/entity/FileSystemStorageTest.java b/common/src/test/java/org/apache/falcon/entity/FileSystemStorageTest.java
index 7b48d2b..1d56a78 100644
--- a/common/src/test/java/org/apache/falcon/entity/FileSystemStorageTest.java
+++ b/common/src/test/java/org/apache/falcon/entity/FileSystemStorageTest.java
@@ -162,18 +162,6 @@ public class FileSystemStorageTest {
     }
 
     @Test
-    public void testExists() throws Exception {
-        final Location location = new Location();
-        location.setPath("/foo/bar");
-        location.setType(LocationType.DATA);
-        List<Location> locations = new ArrayList<Location>();
-        locations.add(location);
-
-        FileSystemStorage storage = new FileSystemStorage("jail://global:00", locations);
-        Assert.assertTrue(storage.exists());
-    }
-
-    @Test
     public void testIsIdentical() throws Exception {
         final String storageUrl = "jail://global:00";
         final Location location1 = new Location();

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/test/java/org/apache/falcon/hadoop/HadoopClientFactoryTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/hadoop/HadoopClientFactoryTest.java b/common/src/test/java/org/apache/falcon/hadoop/HadoopClientFactoryTest.java
new file mode 100644
index 0000000..3b4e7f0
--- /dev/null
+++ b/common/src/test/java/org/apache/falcon/hadoop/HadoopClientFactoryTest.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.hadoop;
+
+import org.apache.falcon.cluster.util.EmbeddedCluster;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.testng.Assert;
+import org.testng.annotations.AfterClass;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.net.URI;
+
+/**
+ * Unit tests for HadoopClientFactory that doles out FileSystem handles.
+ */
+public class HadoopClientFactoryTest {
+
+    private EmbeddedCluster embeddedCluster;
+
+    @BeforeClass
+    public void setUp() throws Exception {
+        embeddedCluster = EmbeddedCluster.newCluster(getClass().getSimpleName());
+    }
+
+    @AfterClass
+    public void tearDown() throws Exception {
+        if (embeddedCluster != null) {
+            embeddedCluster.shutdown();
+        }
+    }
+
+    @Test
+    public void testGet() throws Exception {
+        HadoopClientFactory clientFactory = HadoopClientFactory.get();
+        Assert.assertNotNull(clientFactory);
+    }
+
+    @Test (enabled = false) // todo: cheated the conf to impersonate as same user
+    public void testCreateFileSystemWithSameUser() {
+        String user = System.getProperty("user.name");
+        try {
+            Configuration conf = embeddedCluster.getConf();
+            URI uri = new URI(conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY));
+            Assert.assertNotNull(uri);
+            HadoopClientFactory.get().createProxiedFileSystem(user, uri, conf);
+            Assert.fail("Impersonation should have failed.");
+        } catch (Exception e) {
+            Assert.assertEquals(e.getCause().getClass(), RemoteException.class);
+        }
+    }
+
+    @Test
+    public void testCreateFileSystem() throws Exception {
+        Configuration conf = embeddedCluster.getConf();
+
+        UserGroupInformation.setConfiguration(conf);
+        UserGroupInformation realUser = UserGroupInformation.createUserForTesting(
+                "testuser", new String[]{"testgroup"});
+        UserGroupInformation.createProxyUserForTesting("proxyuser", realUser, new String[]{"proxygroup"});
+
+        URI uri = new URI(conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY));
+        Assert.assertNotNull(uri);
+        FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem("testuser", uri, conf);
+        Assert.assertNotNull(fs);
+    }
+
+    @Test
+    public void testCreateFileSystemWithUser() throws Exception {
+        Configuration conf = embeddedCluster.getConf();
+
+        UserGroupInformation realUser = UserGroupInformation.createUserForTesting(
+                "testuser", new String[]{"testgroup"});
+        UserGroupInformation.createProxyUserForTesting("proxyuser", realUser, new String[]{"proxygroup"});
+        UserGroupInformation.setConfiguration(conf);
+
+        URI uri = new URI(conf.get(HadoopClientFactory.FS_DEFAULT_NAME_KEY));
+        Assert.assertNotNull(uri);
+        FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem("seetharam", uri, conf);
+        Assert.assertNotNull(fs);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/test/java/org/apache/falcon/security/AuthenticationInitializationServiceTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/security/AuthenticationInitializationServiceTest.java b/common/src/test/java/org/apache/falcon/security/AuthenticationInitializationServiceTest.java
new file mode 100644
index 0000000..9b76a61
--- /dev/null
+++ b/common/src/test/java/org/apache/falcon/security/AuthenticationInitializationServiceTest.java
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.security;
+
+import org.apache.falcon.FalconException;
+import org.apache.falcon.util.StartupProperties;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
+import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.io.File;
+
+
+/**
+ * Unit test for AuthenticationInitializationService that employs mocks.
+ */
+public class AuthenticationInitializationServiceTest {
+
+    private AuthenticationInitializationService authenticationService;
+
+    @Mock
+    private UserGroupInformation mockLoginUser;
+
+    @BeforeClass
+    public void setUp() {
+        MockitoAnnotations.initMocks(this);
+
+        authenticationService = new AuthenticationInitializationService();
+    }
+
+    @Test
+    public void testGetName() {
+        Assert.assertEquals("Authentication initialization service",
+                authenticationService.getName());
+    }
+
+    @Test
+    public void testInitForSimpleAuthenticationMethod() {
+        try {
+            StartupProperties.get().setProperty(SecurityUtil.AUTHENTICATION_TYPE,
+                    PseudoAuthenticationHandler.TYPE);
+            authenticationService.init();
+
+            UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
+            Assert.assertFalse(loginUser.isFromKeytab());
+            Assert.assertEquals(loginUser.getAuthenticationMethod().name().toLowerCase(),
+                    PseudoAuthenticationHandler.TYPE);
+            Assert.assertEquals(System.getProperty("user.name"), loginUser.getUserName());
+        } catch (Exception e) {
+            Assert.fail("AuthenticationInitializationService init failed.", e);
+        }
+    }
+
+    @Test
+    public void testKerberosAuthenticationWithKeytabFileDoesNotExist() {
+        try {
+            StartupProperties.get().setProperty(SecurityUtil.AUTHENTICATION_TYPE,
+                    KerberosAuthenticationHandler.TYPE);
+            StartupProperties.get().setProperty(AuthenticationInitializationService.KERBEROS_KEYTAB, "/blah/blah");
+            authenticationService.init();
+            Assert.fail("The keytab file does not exist! must have been thrown.");
+        } catch (Exception e) {
+            Assert.assertEquals(e.getCause().getClass(), IllegalArgumentException.class);
+        }
+    }
+
+    @Test
+    public void testKerberosAuthenticationWithKeytabFileIsADirectory() {
+        try {
+            StartupProperties.get().setProperty(SecurityUtil.AUTHENTICATION_TYPE,
+                    KerberosAuthenticationHandler.TYPE);
+            StartupProperties.get().setProperty(AuthenticationInitializationService.KERBEROS_KEYTAB, "/tmp/");
+            authenticationService.init();
+            Assert.fail("The keytab file cannot be a directory! must have been thrown.");
+        } catch (Exception e) {
+            Assert.assertEquals(e.getCause().getClass(), IllegalArgumentException.class);
+        }
+    }
+
+    @Test
+    public void testKerberosAuthenticationWithKeytabFileNotReadable() {
+        File tempFile = new File(".keytabFile");
+        try {
+            assert tempFile.createNewFile();
+            assert tempFile.setReadable(false);
+
+            StartupProperties.get().setProperty(SecurityUtil.AUTHENTICATION_TYPE,
+                    KerberosAuthenticationHandler.TYPE);
+            StartupProperties.get().setProperty(
+                    AuthenticationInitializationService.KERBEROS_KEYTAB, tempFile.toString());
+            authenticationService.init();
+            Assert.fail("The keytab file is not readable! must have been thrown.");
+        } catch (Exception e) {
+            Assert.assertEquals(e.getCause().getClass(), IllegalArgumentException.class);
+        } finally {
+            assert tempFile.delete();
+        }
+    }
+
+    @Test (enabled = false)
+    public void testInitForKerberosAuthenticationMethod() throws FalconException {
+        Mockito.when(mockLoginUser.getAuthenticationMethod())
+                .thenReturn(UserGroupInformation.AuthenticationMethod.KERBEROS);
+        Mockito.when(mockLoginUser.getUserName()).thenReturn("falcon");
+        Mockito.when(mockLoginUser.isFromKeytab()).thenReturn(Boolean.TRUE);
+
+        StartupProperties.get().setProperty(SecurityUtil.AUTHENTICATION_TYPE,
+                KerberosAuthenticationHandler.TYPE);
+        StartupProperties.get().setProperty(
+                AuthenticationInitializationService.KERBEROS_KEYTAB, "falcon.kerberos.keytab");
+        StartupProperties.get().setProperty(AuthenticationInitializationService.KERBEROS_PRINCIPAL, "falcon");
+
+        authenticationService.init();
+
+        Assert.assertTrue(mockLoginUser.isFromKeytab());
+        Assert.assertEquals(mockLoginUser.getAuthenticationMethod().name(),
+                KerberosAuthenticationHandler.TYPE);
+        Assert.assertEquals("falcon", mockLoginUser.getUserName());
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/common/src/test/java/org/apache/falcon/security/SecurityUtilTest.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/falcon/security/SecurityUtilTest.java b/common/src/test/java/org/apache/falcon/security/SecurityUtilTest.java
new file mode 100644
index 0000000..630aa4b
--- /dev/null
+++ b/common/src/test/java/org/apache/falcon/security/SecurityUtilTest.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.security;
+
+import org.apache.falcon.util.StartupProperties;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+/**
+ * Unit test for Security utils.
+ */
+public class SecurityUtilTest {
+
+    @Test
+    public void testDefaultGetAuthenticationType() throws Exception {
+        Assert.assertEquals(SecurityUtil.getAuthenticationType(), "simple");
+    }
+
+    @Test
+    public void testGetAuthenticationType() throws Exception {
+        try {
+            StartupProperties.get().setProperty(SecurityUtil.AUTHENTICATION_TYPE, "kerberos");
+            Assert.assertEquals(SecurityUtil.getAuthenticationType(), "kerberos");
+        } finally {
+            // reset
+            StartupProperties.get().setProperty(SecurityUtil.AUTHENTICATION_TYPE, "simple");
+        }
+    }
+
+    @Test
+    public void testIsSecurityEnabledByDefault() throws Exception {
+        Assert.assertFalse(SecurityUtil.isSecurityEnabled());
+    }
+
+    @Test
+    public void testIsSecurityEnabled() throws Exception {
+        try {
+            StartupProperties.get().setProperty(SecurityUtil.AUTHENTICATION_TYPE, "kerberos");
+            Assert.assertTrue(SecurityUtil.isSecurityEnabled());
+        } finally {
+            // reset
+            StartupProperties.get().setProperty(SecurityUtil.AUTHENTICATION_TYPE, "simple");
+        }
+    }
+
+    @Test
+    public void testGetProxyUser() throws Exception {
+        UserGroupInformation proxyUgi = SecurityUtil.getProxyUser("proxy");
+        Assert.assertNotNull(proxyUgi);
+        Assert.assertEquals(proxyUgi.getUserName(), "proxy");
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/Security.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/Security.twiki b/docs/src/site/twiki/Security.twiki
new file mode 100644
index 0000000..c1f7656
--- /dev/null
+++ b/docs/src/site/twiki/Security.twiki
@@ -0,0 +1,193 @@
+---+ Securing Falcon
+
+---++ Overview
+
+Apache Falcon enforces authentication on protected resources. Once authentication has been established, it sets a
+signed HTTP cookie that contains an authentication token with the user name, user principal,
+authentication type and expiration time.
+
+It does so by using [[http://hadoop.apache.org/docs/current/hadoop-auth/index.html][Hadoop Auth]].
+Hadoop Auth is a Java library consisting of client and server components that enable Kerberos SPNEGO authentication
+for HTTP. Hadoop Auth also supports additional authentication mechanisms on the client and the server side via two
+simple interfaces.
+
+
+---++ Authentication Methods
+
+Falcon supports two authentication methods out of the box: simple and Kerberos.
+
+---+++ Pseudo/Simple Authentication
+
+Falcon authenticates the user by simply trusting the value of the query string parameter 'user.name'. This is the
+default mode Falcon is configured with.
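+
+For example, the following request (a sketch, assuming a local Falcon server on the default
+port) is authenticated as the user 'venkatesh':
+<verbatim>
+$ curl "http://localhost:15000/api/admin/version?user.name=venkatesh"
+</verbatim>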
+
+---+++ Kerberos Authentication
+
+Falcon uses HTTP Kerberos SPNEGO to authenticate the user.
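+
+For example, after obtaining a Kerberos ticket via kinit, a SPNEGO-capable client such as curl
+can negotiate authentication (a sketch, assuming a local Falcon server on the default port):
+<verbatim>
+$ kinit
+$ curl --negotiate -u : "http://localhost:15000/api/admin/version"
+</verbatim>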
+
+---++ Server Side Configuration Setup
+
+---+++ Common Configuration Parameters
+
+<verbatim>
+# Authentication type must be specified: simple|kerberos
+*.falcon.authentication.type=kerberos
+</verbatim>
+
+---+++ Kerberos Configuration
+
+<verbatim>
+##### Service Configuration
+
+# Indicates the Kerberos principal to be used in Falcon Service.
+*.falcon.service.authentication.kerberos.principal=falcon/_HOST@EXAMPLE.COM
+
+# Location of the keytab file with the credentials for the Service principal.
+*.falcon.service.authentication.kerberos.keytab=/etc/security/keytabs/falcon.service.keytab
+
+# name node principal to talk to config store
+*.dfs.namenode.kerberos.principal=nn/_HOST@EXAMPLE.COM
+
+##### SPNEGO Configuration
+
+# Authentication type must be specified: simple|kerberos|<class>
+# org.apache.falcon.security.RemoteUserInHeaderBasedAuthenticationHandler can be used for backwards compatibility
+*.falcon.http.authentication.type=kerberos
+
+# Indicates how long (in seconds) an authentication token is valid before it has to be renewed.
+*.falcon.http.authentication.token.validity=36000
+
+# The signature secret for signing the authentication tokens.
+*.falcon.http.authentication.signature.secret=falcon
+
+# The domain to use for the HTTP cookie that stores the authentication token.
+*.falcon.http.authentication.cookie.domain=
+
+# Indicates if anonymous requests are allowed when using 'simple' authentication.
+*.falcon.http.authentication.simple.anonymous.allowed=true
+
+# Indicates the Kerberos principal to be used for HTTP endpoint.
+# The principal MUST start with 'HTTP/' as per Kerberos HTTP SPNEGO specification.
+*.falcon.http.authentication.kerberos.principal=HTTP/_HOST@EXAMPLE.COM
+
+# Location of the keytab file with the credentials for the HTTP principal.
+*.falcon.http.authentication.kerberos.keytab=/etc/security/keytabs/spnego.service.keytab
+
+# The kerberos name rules are used to resolve kerberos principal names; refer to Hadoop's KerberosName for more details.
+*.falcon.http.authentication.kerberos.name.rules=DEFAULT
+
+# Comma separated list of blacklisted users
+*.falcon.http.authentication.blacklisted.users=
+</verbatim>
+
+---+++ Pseudo/Simple Configuration
+
+<verbatim>
+##### SPNEGO Configuration
+
+# Authentication type must be specified: simple|kerberos|<class>
+# org.apache.falcon.security.RemoteUserInHeaderBasedAuthenticationHandler can be used for backwards compatibility
+*.falcon.http.authentication.type=simple
+
+# Indicates how long (in seconds) an authentication token is valid before it has to be renewed.
+*.falcon.http.authentication.token.validity=36000
+
+# The signature secret for signing the authentication tokens.
+*.falcon.http.authentication.signature.secret=falcon
+
+# The domain to use for the HTTP cookie that stores the authentication token.
+*.falcon.http.authentication.cookie.domain=
+
+# Indicates if anonymous requests are allowed when using 'simple' authentication.
+*.falcon.http.authentication.simple.anonymous.allowed=true
+
+# Comma separated list of blacklisted users
+*.falcon.http.authentication.blacklisted.users=
+</verbatim>
+
+---+++ SSL Configuration
+
+<verbatim>
+*.falcon.enableTLS=true
+*.keystore.file=/path/to/keystore/file
+*.keystore.password=password
+</verbatim>
+
+---+++ Distributed Falcon Setup
+
+Falcon should be configured to communicate with Prism over TLS in secure mode. This is not enabled by default.
+
+
+---++ Changes to ownership and permissions of directories managed by Falcon
+
+| *Directory*             | *Location*                                                        | *Owner* | *Permissions* |
+| Configuration Store     | ${config.store.uri}                                               | falcon  | 750           |
+| Oozie coord/bundle XMLs | ${cluster.staging-location}/workflows/{entity}/{entity-name}      | falcon  | 644           |
+| Shared libs             | {cluster.working}/{lib,libext}                                    | falcon  | 755           |
+| App logs                | ${cluster.staging-location}/workflows/{entity}/{entity-name}/logs | falcon  | 777           |
+
+
+---++ Backwards compatibility
+
+---+++ Scheduled Entities
+
+Entities already scheduled with an earlier version of Falcon are not compatible with this version.
+
+---+++ Falcon Clients
+
+Older Falcon clients are backwards compatible with respect to authentication: user information sent as part of the
+HTTP header, Remote-User, is still honoured when the authentication type is configured as below:
+
+<verbatim>
+*.falcon.http.authentication.type=org.apache.falcon.security.RemoteUserInHeaderBasedAuthenticationHandler
+</verbatim>
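+
+With this handler configured, an older client may continue to pass the user in the Remote-User
+HTTP header, for example (a sketch, assuming a local Falcon server on the default port):
+<verbatim>
+$ curl -H "Remote-User: venkatesh" http://localhost:15000/api/admin/version
+</verbatim>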
+
+---+++ Blacklisted super users for authentication
+
+The blacklist used to contain the following super users: hdfs, mapreduce, oozie, and falcon.
+The list has been externalized from code into the Startup.properties file; it is now empty by
+default and must be configured explicitly in that file.
+
+
+---+++ Falcon Dashboard
+
+The dashboard assumes an anonymous user with the Pseudo/Simple method, and hence anonymous requests must be enabled
+for it to work.
+<verbatim>
+# Indicates if anonymous requests are allowed when using 'simple' authentication.
+*.falcon.http.authentication.simple.anonymous.allowed=true
+</verbatim>
+
+With the Kerberos method, the browser must support HTTP Kerberos SPNEGO.
+
+
+---++ Known Limitations
+
+   * ActiveMQ topics are not secure but will be in the near future
+   * Entities already scheduled with an earlier version of Falcon are not compatible with this version, as new
+   workflow parameters, such as the user, are now passed back into Falcon and are required
+   * Use of hftp as the scheme for the read-only interface in the cluster entity [[https://issues.apache.org/jira/browse/HADOOP-10215][will not work in Oozie]].
+   The alternative is to use the webhdfs scheme instead; it has been tested with DistCp.
+
+
+---++ Examples
+
+---+++ Accessing the server using Falcon CLI (Java client)
+
+There is no change in the way the CLI is used. The CLI has been changed to work with the configured authentication
+method.
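+
+For example, the following works unchanged in both simple and Kerberos modes (a sketch,
+assuming the falcon CLI script is on the path):
+<verbatim>
+$ falcon admin -version
+</verbatim>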
+
+---+++ Accessing the server using curl
+
+The following examples show how to access protected resources using curl:
+
+<verbatim>
+$ kinit
+Please enter the password for venkatesh@LOCALHOST:
+
+$ curl http://localhost:15000/api/admin/version
+
+$ curl http://localhost:15000/api/admin/version?user.name=venkatesh
+
+$ curl --negotiate -u foo -b ~/cookiejar.txt -c ~/cookiejar.txt http://localhost:15000/api/admin/version
+</verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/index.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/index.twiki b/docs/src/site/twiki/index.twiki
index 81c4c3e..ee48fbb 100644
--- a/docs/src/site/twiki/index.twiki
+++ b/docs/src/site/twiki/index.twiki
@@ -30,7 +30,8 @@ describes steps to on-board a pipeline to Falcon. It also gives a sample pipelin
 describes various options for the command line utility provided by Falcon.
 
 Falcon provides OOTB [[HiveIntegration][lifecycle management for Tables in Hive (HCatalog)]]
-such as table replication for BCP and table eviction.
+such as table replication for BCP and table eviction. Falcon also enforces
+[[Security][kerberos authentication]] on protected resources and enables SSL.
 
 #LicenseInfo
 ---+ Licensing Information

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/AdminConfig.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/AdminConfig.twiki b/docs/src/site/twiki/restapi/AdminConfig.twiki
index 2841b25..675b19e 100644
--- a/docs/src/site/twiki/restapi/AdminConfig.twiki
+++ b/docs/src/site/twiki/restapi/AdminConfig.twiki
@@ -17,7 +17,6 @@ Configuration information of the server.
 ---+++ Rest Call
 <verbatim>
 GET http://localhost:15000/api/admin/config/deploy
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/AdminStack.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/AdminStack.twiki b/docs/src/site/twiki/restapi/AdminStack.twiki
index a241999..79dbd9b 100644
--- a/docs/src/site/twiki/restapi/AdminStack.twiki
+++ b/docs/src/site/twiki/restapi/AdminStack.twiki
@@ -16,7 +16,6 @@ Stack trace of the server.
 ---+++ Rest Call
 <verbatim>
 GET http://localhost:15000/api/admin/stack
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/AdminVersion.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/AdminVersion.twiki b/docs/src/site/twiki/restapi/AdminVersion.twiki
index fbf1405..00b0283 100644
--- a/docs/src/site/twiki/restapi/AdminVersion.twiki
+++ b/docs/src/site/twiki/restapi/AdminVersion.twiki
@@ -16,7 +16,6 @@ Version of the server.
 ---+++ Rest Call
 <verbatim>
 GET http://localhost:15000/api/admin/version
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/EntityDefinition.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/EntityDefinition.twiki b/docs/src/site/twiki/restapi/EntityDefinition.twiki
index 955be71..1f76a4f 100644
--- a/docs/src/site/twiki/restapi/EntityDefinition.twiki
+++ b/docs/src/site/twiki/restapi/EntityDefinition.twiki
@@ -18,7 +18,6 @@ Definition of the entity.
 ---+++ Rest Call
 <verbatim>
 GET http://localhost:15000/api/entities/definition/process/SampleProcess
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/EntityDelete.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/EntityDelete.twiki b/docs/src/site/twiki/restapi/EntityDelete.twiki
index 7a7e22a..850b451 100644
--- a/docs/src/site/twiki/restapi/EntityDelete.twiki
+++ b/docs/src/site/twiki/restapi/EntityDelete.twiki
@@ -18,7 +18,6 @@ Results of the delete operation.
 ---+++ Rest Call
 <verbatim>
 DELETE http://localhost:15000/api/entities/delete/cluster/SampleProcess
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/EntityDependencies.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/EntityDependencies.twiki b/docs/src/site/twiki/restapi/EntityDependencies.twiki
index 6daab68..c61bc43 100644
--- a/docs/src/site/twiki/restapi/EntityDependencies.twiki
+++ b/docs/src/site/twiki/restapi/EntityDependencies.twiki
@@ -18,7 +18,6 @@ Dependencies of the entity.
 ---+++ Rest Call
 <verbatim>
 GET http://localhost:15000/api/entities/dependencies/process/SampleProcess
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/EntityList.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/EntityList.twiki b/docs/src/site/twiki/restapi/EntityList.twiki
index bca84b0..b9cf349 100644
--- a/docs/src/site/twiki/restapi/EntityList.twiki
+++ b/docs/src/site/twiki/restapi/EntityList.twiki
@@ -19,7 +19,6 @@ List of the entities.
 ---+++ Rest Call
 <verbatim>
 GET http://localhost:15000/api/entities/list/feed
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>
@@ -40,7 +39,6 @@ Remote-User: rgautam
 ---+++ Rest Call
 <verbatim>
 GET http://localhost:15000/api/entities/list/feed?fields=status
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/EntityResume.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/EntityResume.twiki b/docs/src/site/twiki/restapi/EntityResume.twiki
index 223a83f..a2d5184 100644
--- a/docs/src/site/twiki/restapi/EntityResume.twiki
+++ b/docs/src/site/twiki/restapi/EntityResume.twiki
@@ -18,7 +18,6 @@ Result of the resume command.
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/entities/resume/process/SampleProcess
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/EntitySchedule.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/EntitySchedule.twiki b/docs/src/site/twiki/restapi/EntitySchedule.twiki
index e481613..dd97141 100644
--- a/docs/src/site/twiki/restapi/EntitySchedule.twiki
+++ b/docs/src/site/twiki/restapi/EntitySchedule.twiki
@@ -18,7 +18,6 @@ Result of the schedule command.
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/entities/schedule/process/SampleProcess
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/EntityStatus.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/EntityStatus.twiki b/docs/src/site/twiki/restapi/EntityStatus.twiki
index f0e772b..34d166d 100644
--- a/docs/src/site/twiki/restapi/EntityStatus.twiki
+++ b/docs/src/site/twiki/restapi/EntityStatus.twiki
@@ -18,7 +18,6 @@ Status of the entity.
 ---+++ Rest Call
 <verbatim>
 GET http://localhost:15000/api/entities/status/process/SampleProcess
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/EntitySubmit.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/EntitySubmit.twiki b/docs/src/site/twiki/restapi/EntitySubmit.twiki
index e4b608e..925381c 100644
--- a/docs/src/site/twiki/restapi/EntitySubmit.twiki
+++ b/docs/src/site/twiki/restapi/EntitySubmit.twiki
@@ -17,7 +17,6 @@ Result of the submission.
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/entities/submit/feed
-Remote-User: rgautam
 <?xml version="1.0" encoding="UTF-8"?>
 <!-- Hourly sample input data -->
 
@@ -59,7 +58,6 @@ Remote-User: rgautam
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/entities/submit/process
-Remote-User: rgautam
 <?xml version="1.0" encoding="UTF-8"?>
 <!-- Daily sample process. Runs at 6th hour every day. Input - last day's hourly data. Generates output for yesterday -->
 <process xmlns="uri:falcon:process:0.1" name="SampleProcess" >
@@ -103,4 +101,4 @@ Remote-User: rgautam
     "message": "default\/Submit successful (process) SampleProcess\n",
     "status": "SUCCEEDED"
 }
-</verbatim>
\ No newline at end of file
+</verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/EntitySubmitAndSchedule.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/EntitySubmitAndSchedule.twiki b/docs/src/site/twiki/restapi/EntitySubmitAndSchedule.twiki
index fb3649d..042a5fb 100644
--- a/docs/src/site/twiki/restapi/EntitySubmitAndSchedule.twiki
+++ b/docs/src/site/twiki/restapi/EntitySubmitAndSchedule.twiki
@@ -17,7 +17,6 @@ Result of the submit and schedule command.
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/entities/submitAndSchedule/process
-Remote-User: rgautam
 <?xml version="1.0" encoding="UTF-8"?>
 <!-- Daily sample process. Runs at 6th hour every day. Input - last day's hourly data. Generates output for yesterday -->
 <process xmlns="uri:falcon:process:0.1" name="SampleProcess" >

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/EntitySuspend.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/EntitySuspend.twiki b/docs/src/site/twiki/restapi/EntitySuspend.twiki
index 9d6e9ab..9e5efca 100644
--- a/docs/src/site/twiki/restapi/EntitySuspend.twiki
+++ b/docs/src/site/twiki/restapi/EntitySuspend.twiki
@@ -18,7 +18,6 @@ Status of the entity.
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/entities/suspend/process/SampleProcess
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/EntityUpdate.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/EntityUpdate.twiki b/docs/src/site/twiki/restapi/EntityUpdate.twiki
index 16ec439..f2c2e7e 100644
--- a/docs/src/site/twiki/restapi/EntityUpdate.twiki
+++ b/docs/src/site/twiki/restapi/EntityUpdate.twiki
@@ -19,7 +19,6 @@ Result of the validation.
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/entities/update/process/SampleProcess?effective=2014-01-01T00:00Z
-Remote-User: rgautam
 <?xml version="1.0" encoding="UTF-8"?>
 <!-- Daily sample process. Runs at 6th hour every day. Input - last day's hourly data. Generates output for yesterday -->
 <process xmlns="uri:falcon:process:0.1" name="SampleProcess" >

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/EntityValidate.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/EntityValidate.twiki b/docs/src/site/twiki/restapi/EntityValidate.twiki
index bc0f508..89bd155 100644
--- a/docs/src/site/twiki/restapi/EntityValidate.twiki
+++ b/docs/src/site/twiki/restapi/EntityValidate.twiki
@@ -17,7 +17,6 @@ Result of the validation.
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/entities/validate/cluster
-Remote-User: rgautam
 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
 <cluster xmlns="uri:falcon:cluster:0.1" name="primary-cluster" description="Primary Cluster" colo="west-coast">
     <interfaces>
@@ -46,7 +45,6 @@ Remote-User: rgautam
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/entities/validate/feed
-Remote-User: rgautam
 <?xml version="1.0" encoding="UTF-8"?>
 <!-- Hourly sample input data -->
 
@@ -88,7 +86,6 @@ Remote-User: rgautam
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/entities/validate/feed
-Remote-User: rgautam
 <?xml version="1.0" encoding="UTF-8"?>
 <!-- Daily sample output data -->
 
@@ -125,7 +122,6 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/entities/validate/process
-Remote-User: rgautam
 <?xml version="1.0" encoding="UTF-8"?>
 <!-- Daily sample process. Runs at 6th hour every day. Input - last day's hourly data. Generates output for yesterday -->
 <process xmlns="uri:falcon:process:0.1" name="SampleProcess" >
@@ -169,4 +165,4 @@ Remote-User: rgautam
     "message": "Validated successfully (PROCESS) SampleProcess",
     "status": "SUCCEEDED"
 }
-</verbatim>
\ No newline at end of file
+</verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/InstanceKill.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/InstanceKill.twiki b/docs/src/site/twiki/restapi/InstanceKill.twiki
index 5c429f6..eff2893 100644
--- a/docs/src/site/twiki/restapi/InstanceKill.twiki
+++ b/docs/src/site/twiki/restapi/InstanceKill.twiki
@@ -19,7 +19,6 @@ Result of the kill operation.
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/instance/kill/process/SampleProcess?colo=*&start=2012-04-03T07:00Z
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/InstanceLogs.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/InstanceLogs.twiki b/docs/src/site/twiki/restapi/InstanceLogs.twiki
index f84b828..599f2d7 100644
--- a/docs/src/site/twiki/restapi/InstanceLogs.twiki
+++ b/docs/src/site/twiki/restapi/InstanceLogs.twiki
@@ -20,7 +20,6 @@ Log of specified instance.
 ---+++ Rest Call
 <verbatim>
 GET http://localhost:15000/api/instance/logs/process/SampleProcess?colo=*&start=2012-04-03T07:00Z
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/InstanceRerun.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/InstanceRerun.twiki b/docs/src/site/twiki/restapi/InstanceRerun.twiki
index cf35475..77608e0 100644
--- a/docs/src/site/twiki/restapi/InstanceRerun.twiki
+++ b/docs/src/site/twiki/restapi/InstanceRerun.twiki
@@ -19,7 +19,6 @@ Results of the rerun command.
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/instance/rerun/process/SampleProcess?colo=*&start=2012-04-03T07:00Z
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/InstanceResume.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/InstanceResume.twiki b/docs/src/site/twiki/restapi/InstanceResume.twiki
index 2bdd6e1..2d29569 100644
--- a/docs/src/site/twiki/restapi/InstanceResume.twiki
+++ b/docs/src/site/twiki/restapi/InstanceResume.twiki
@@ -19,7 +19,6 @@ Results of the resume command.
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/instance/resume/process/SampleProcess?colo=*&start=2012-04-03T07:00Z
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/InstanceRunning.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/InstanceRunning.twiki b/docs/src/site/twiki/restapi/InstanceRunning.twiki
index 6b5ee66..116565f 100644
--- a/docs/src/site/twiki/restapi/InstanceRunning.twiki
+++ b/docs/src/site/twiki/restapi/InstanceRunning.twiki
@@ -18,7 +18,6 @@ List of instances currently running.
 ---+++ Rest Call
 <verbatim>
 GET http://localhost:15000/api/instance/running/process/SampleProcess?colo=*
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/InstanceStatus.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/InstanceStatus.twiki b/docs/src/site/twiki/restapi/InstanceStatus.twiki
index eddc2c8..99497d1 100644
--- a/docs/src/site/twiki/restapi/InstanceStatus.twiki
+++ b/docs/src/site/twiki/restapi/InstanceStatus.twiki
@@ -20,7 +20,6 @@ Status of the specified instance.
 ---+++ Rest Call
 <verbatim>
 GET http://localhost:15000/api/instance/status/process/SampleProcess?colo=*&start=2012-04-03T07:00Z
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/InstanceSuspend.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/InstanceSuspend.twiki b/docs/src/site/twiki/restapi/InstanceSuspend.twiki
index 62cf72b..8f5f7c3 100644
--- a/docs/src/site/twiki/restapi/InstanceSuspend.twiki
+++ b/docs/src/site/twiki/restapi/InstanceSuspend.twiki
@@ -19,7 +19,6 @@ Results of the suspend command.
 ---+++ Rest Call
 <verbatim>
 POST http://localhost:15000/api/instance/suspend/process/SampleProcess?colo=*&start=2012-04-03T07:00Z
-Remote-User: rgautam
 </verbatim>
 ---+++ Result
 <verbatim>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/docs/src/site/twiki/restapi/ResourceList.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/restapi/ResourceList.twiki b/docs/src/site/twiki/restapi/ResourceList.twiki
index b9ec4b6..6ca0dea 100644
--- a/docs/src/site/twiki/restapi/ResourceList.twiki
+++ b/docs/src/site/twiki/restapi/ResourceList.twiki
@@ -1,8 +1,32 @@
 ---+ RESTful Resources
+
+---++ Resource List
    * <a href="#REST_Call_on_Entity_Resource">REST Call on Entity Resource</a>
    * <a href="#REST_Call_on_Feed_and_Process_Instances">REST Call on Feed/Process Instances</a>
    * <a href="#REST_Call_on_Admin_Resource">REST Call on Admin Resource</a>
 
+---++ Authentication
+
+When security is off (Pseudo/Simple), the authenticated user is the username specified in the user.name query
+parameter. If the user.name parameter is not set, the server may either set the authenticated user to a default web
+user, if there is any, or return an error response.
+
+When security is on (kerberos), authentication is performed by Kerberos SPNEGO.
+
+Below are examples using the curl command-line tool.
+
+Authentication when security is off (Pseudo/Simple):
+<verbatim>
+curl -i "http://<HOST>:<PORT>/<PATH>?[user.name=<USER>&]<PARAM>=..."
+</verbatim>
+
+Authentication using Kerberos SPNEGO when security is on:
+<verbatim>
+curl -i --negotiate -u : "http://<HOST>:<PORT>/<PATH>?<PARAM>=..."
+</verbatim>
+
+See also: [[../Security.twiki][Security in Falcon]]
+
 ---++ REST Call on Admin Resource
 
 | *Call Type* | *Resource*                                     | *Description*                               |

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/feed/src/main/java/org/apache/falcon/converter/OozieFeedMapper.java
----------------------------------------------------------------------
diff --git a/feed/src/main/java/org/apache/falcon/converter/OozieFeedMapper.java b/feed/src/main/java/org/apache/falcon/converter/OozieFeedMapper.java
index 6ca2134..d6dee77 100644
--- a/feed/src/main/java/org/apache/falcon/converter/OozieFeedMapper.java
+++ b/feed/src/main/java/org/apache/falcon/converter/OozieFeedMapper.java
@@ -38,6 +38,7 @@ import org.apache.falcon.entity.v0.feed.Feed;
 import org.apache.falcon.entity.v0.feed.LocationType;
 import org.apache.falcon.entity.v0.feed.Property;
 import org.apache.falcon.expression.ExpressionHelper;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.messaging.EntityInstanceMessage.ARG;
 import org.apache.falcon.messaging.EntityInstanceMessage.EntityOps;
 import org.apache.falcon.oozie.coordinator.ACTION;
@@ -45,6 +46,7 @@ import org.apache.falcon.oozie.coordinator.COORDINATORAPP;
 import org.apache.falcon.oozie.coordinator.SYNCDATASET;
 import org.apache.falcon.oozie.coordinator.WORKFLOW;
 import org.apache.falcon.oozie.workflow.WORKFLOWAPP;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.log4j.Logger;
@@ -426,7 +428,7 @@ public class OozieFeedMapper extends AbstractOozieEntityMapper<Feed> {
                     propagateTableStorageProperties(trgCluster, targetTableStorage, props, "falconTarget");
                     propagateTableCopyProperties(srcCluster, sourceTableStorage,
                             trgCluster, targetTableStorage, props);
-                    setupHiveConfiguration(trgCluster, sourceTableStorage, targetTableStorage, wfPath);
+                    setupHiveConfiguration(srcCluster, sourceTableStorage, trgCluster, targetTableStorage, wfPath);
                 }
 
                 propagateLateDataProperties(feed, instancePaths, sourceStorage.getType().name(), props);
@@ -479,9 +481,11 @@ public class OozieFeedMapper extends AbstractOozieEntityMapper<Feed> {
             props.put(prefix + "Partition", "${coord:dataInPartitionFilter('input', 'hive')}");
         }
 
-        private void setupHiveConfiguration(Cluster trgCluster, CatalogStorage sourceStorage,
-                                            CatalogStorage targetStorage, Path wfPath) throws IOException {
-            FileSystem fs = FileSystem.get(ClusterHelper.getConfiguration(trgCluster));
+        private void setupHiveConfiguration(Cluster srcCluster, CatalogStorage sourceStorage,
+                                            Cluster trgCluster, CatalogStorage targetStorage, Path wfPath)
+            throws IOException, FalconException {
+            Configuration conf = ClusterHelper.getConfiguration(trgCluster);
+            FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(conf);
 
             // copy import export scripts to stagingDir
             Path scriptPath = new Path(wfPath, "scripts");
@@ -490,8 +494,8 @@ public class OozieFeedMapper extends AbstractOozieEntityMapper<Feed> {
 
             // create hive conf to stagingDir
             Path confPath = new Path(wfPath + "/conf");
-            createHiveConf(fs, confPath, sourceStorage.getCatalogUrl(), "falcon-source-");
-            createHiveConf(fs, confPath, targetStorage.getCatalogUrl(), "falcon-target-");
+            createHiveConf(fs, confPath, sourceStorage.getCatalogUrl(), srcCluster, "falcon-source-");
+            createHiveConf(fs, confPath, targetStorage.getCatalogUrl(), trgCluster, "falcon-target-");
         }
 
         private void copyHiveScript(FileSystem fs, Path scriptPath,

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/feed/src/main/resources/config/workflow/replication-workflow.xml
----------------------------------------------------------------------
diff --git a/feed/src/main/resources/config/workflow/replication-workflow.xml b/feed/src/main/resources/config/workflow/replication-workflow.xml
index 91d0285..7a95c35 100644
--- a/feed/src/main/resources/config/workflow/replication-workflow.xml
+++ b/feed/src/main/resources/config/workflow/replication-workflow.xml
@@ -218,6 +218,8 @@
             <arg>${wf:id()}</arg>
             <arg>-logDir</arg>
             <arg>${logDir}/job-${nominalTime}/${srcClusterName}/</arg>
+            <arg>-workflowUser</arg>
+            <arg>${wf:user()}</arg>
             <file>${wf:conf("falcon.libpath")}/activemq-core.jar</file>
             <file>${wf:conf("falcon.libpath")}/geronimo-j2ee-management.jar</file>
             <file>${wf:conf("falcon.libpath")}/jms.jar</file>
@@ -283,6 +285,8 @@
             <arg>${wf:id()}</arg>
             <arg>-logDir</arg>
             <arg>${logDir}/job-${nominalTime}/${srcClusterName}/</arg>
+            <arg>-workflowUser</arg>
+            <arg>${wf:user()}</arg>
             <file>${wf:conf("falcon.libpath")}/activemq-core.jar</file>
             <file>${wf:conf("falcon.libpath")}/geronimo-j2ee-management.jar</file>
             <file>${wf:conf("falcon.libpath")}/jms.jar</file>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/feed/src/main/resources/config/workflow/retention-workflow.xml
----------------------------------------------------------------------
diff --git a/feed/src/main/resources/config/workflow/retention-workflow.xml b/feed/src/main/resources/config/workflow/retention-workflow.xml
index 8b444f5..08795b4 100644
--- a/feed/src/main/resources/config/workflow/retention-workflow.xml
+++ b/feed/src/main/resources/config/workflow/retention-workflow.xml
@@ -107,6 +107,8 @@
             <arg>${wf:conf("broker.ttlInMins")}</arg>
             <arg>-cluster</arg>
             <arg>${cluster}</arg>
+            <arg>-workflowUser</arg>
+            <arg>${wf:user()}</arg>
             <file>${wf:conf("falcon.libpath")}/activemq-core.jar</file>
             <file>${wf:conf("falcon.libpath")}/geronimo-j2ee-management.jar</file>
             <file>${wf:conf("falcon.libpath")}/jms.jar</file>
@@ -119,8 +121,8 @@
     </action>
 
     <kill name="fail">
-        <message>Workflow failed, error
-            message[${wf:errorMessage(wf:lastErrorNode())}]
+        <message>
+            Workflow failed, error message[${wf:errorMessage(wf:lastErrorNode())}]
         </message>
     </kill>
     <end name='end'/>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/feed/src/test/java/org/apache/falcon/converter/OozieFeedMapperTest.java
----------------------------------------------------------------------
diff --git a/feed/src/test/java/org/apache/falcon/converter/OozieFeedMapperTest.java b/feed/src/test/java/org/apache/falcon/converter/OozieFeedMapperTest.java
index a37755b..a153462 100644
--- a/feed/src/test/java/org/apache/falcon/converter/OozieFeedMapperTest.java
+++ b/feed/src/test/java/org/apache/falcon/converter/OozieFeedMapperTest.java
@@ -40,6 +40,7 @@ import org.apache.falcon.oozie.workflow.ACTION;
 import org.apache.falcon.oozie.workflow.DECISION;
 import org.apache.falcon.oozie.workflow.JAVA;
 import org.apache.falcon.oozie.workflow.WORKFLOWAPP;
+import org.apache.falcon.security.CurrentUser;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.testng.Assert;
@@ -82,6 +83,8 @@ public class OozieFeedMapperTest {
 
     @BeforeClass
     public void setUpDFS() throws Exception {
+        CurrentUser.authenticate("falcon");
+
         srcMiniDFS = EmbeddedCluster.newCluster("cluster1");
         String srcHdfsUrl = srcMiniDFS.getConf().get("fs.default.name");
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/hadoop-webapp/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-webapp/pom.xml b/hadoop-webapp/pom.xml
index e576310..846a43e 100644
--- a/hadoop-webapp/pom.xml
+++ b/hadoop-webapp/pom.xml
@@ -161,11 +161,6 @@
 
         <!-- Oozie dependencies -->
         <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-exec</artifactId>
-        </dependency>
-
-        <dependency>
             <groupId>org.apache.hcatalog</groupId>
             <artifactId>webhcat-java-client</artifactId>
         </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/html5-ui/js/falcon.js
----------------------------------------------------------------------
diff --git a/html5-ui/js/falcon.js b/html5-ui/js/falcon.js
index ff3a929..0a75f6a 100644
--- a/html5-ui/js/falcon.js
+++ b/html5-ui/js/falcon.js
@@ -17,21 +17,26 @@
 (function(exports) {
   "use strict";
 
-  var USER_ID = 'admin';
+  var USER_ID = 'dashboard';
 
   function onError (msg) {
     alert(msg);
   }
 
   function ajax_impl(options) {
-    $.extend(options, {'headers': { 'Remote-User': USER_ID }});
+    // $.extend(options, add_user(options.url));
     return $.ajax(options);
   }
 
+  function add_user(url) {
+    var paramSeparator = (url.indexOf('?') != -1) ? '&' : '?';
+    return url + paramSeparator + 'user.name=' + USER_ID;
+  }
+
   function getJson_impl(url, success) {
     return ajax_impl({
       'dataType': 'json',
-      'url': url,
+      'url': add_user(url),
       'success': success
     });
   }
@@ -39,7 +44,7 @@
   function getText_impl(url, success) {
     return ajax_impl({
       'dataType': 'text',
-      'url': url,
+      'url': add_user(url),
       'success': success
     });
   }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/messaging/pom.xml
----------------------------------------------------------------------
diff --git a/messaging/pom.xml b/messaging/pom.xml
index 9aa5347..aa5765c 100644
--- a/messaging/pom.xml
+++ b/messaging/pom.xml
@@ -82,6 +82,11 @@
     <dependencies>
         <dependency>
             <groupId>org.apache.falcon</groupId>
+            <artifactId>falcon-common</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.falcon</groupId>
             <artifactId>falcon-test-util</artifactId>
             <scope>test</scope>
         </dependency>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java
----------------------------------------------------------------------
diff --git a/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java b/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java
index eb49fd5..d8ba4f3 100644
--- a/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java
+++ b/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessage.java
@@ -70,7 +70,8 @@ public class EntityInstanceMessage {
         topicName("topicName"),
         status("status"),
         brokerTTL("broker.ttlInMins"),
-        cluster("cluster");
+        cluster("cluster"),
+        workflowUser("workflowUser");
 
         private String propName;
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessageCreator.java
----------------------------------------------------------------------
diff --git a/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessageCreator.java b/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessageCreator.java
index ecda5eb..c8ea12d 100644
--- a/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessageCreator.java
+++ b/messaging/src/main/java/org/apache/falcon/messaging/EntityInstanceMessageCreator.java
@@ -41,18 +41,16 @@ public class EntityInstanceMessageCreator {
 
     public Message createMessage(Session session) throws JMSException {
         mapMessage = session.createMapMessage();
-        for (Entry<ARG, String> entry : instanceMessage.getKeyValueMap()
-                .entrySet()) {
-            mapMessage.setString(entry.getKey().getArgName(), instanceMessage
-                    .getKeyValueMap().get(entry.getKey()));
+        for (Entry<ARG, String> entry : instanceMessage.getKeyValueMap().entrySet()) {
+            mapMessage.setString(entry.getKey().getArgName(),
+                    instanceMessage.getKeyValueMap().get(entry.getKey()));
         }
-        return mapMessage;
 
+        return mapMessage;
     }
 
     @Override
     public String toString() {
         return this.mapMessage.toString();
     }
-
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java
----------------------------------------------------------------------
diff --git a/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java b/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java
index b37931c..0f9e918 100644
--- a/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java
+++ b/messaging/src/main/java/org/apache/falcon/messaging/MessageProducer.java
@@ -65,7 +65,6 @@ public class MessageProducer extends Configured implements Tool {
         producer.setTimeToLive(messageTTL);
         producer.send(new EntityInstanceMessageCreator(entityInstanceMessage)
                 .createMessage(session));
-
     }
 
     public static void main(String[] args) throws Exception {
@@ -124,6 +123,8 @@ public class MessageProducer extends Configured implements Tool {
                 "workflow id"));
         addOption(options, new Option(ARG.cluster.getArgName(), true,
                 "cluster name"));
+        addOption(options, new Option(ARG.workflowUser.getArgName(), true,
+                "workflow user id"), false);
 
         return new GnuParser().parse(options, arguments);
     }
@@ -152,14 +153,12 @@ public class MessageProducer extends Configured implements Tool {
             return 0;
         }
 
-        MessageProducer falconMessageProducer = new MessageProducer();
         try {
-            falconMessageProducer.createAndStartConnection(
-                    cmd.getOptionValue(ARG.brokerImplClass.name()), "",
+            createAndStartConnection(cmd.getOptionValue(ARG.brokerImplClass.name()), "",
                     "", cmd.getOptionValue(ARG.brokerUrl.name()));
             for (EntityInstanceMessage message : entityInstanceMessage) {
                 LOG.info("Sending message:" + message.getKeyValueMap());
-                falconMessageProducer.sendMessage(message);
+                sendMessage(message);
             }
         } catch (JMSException e) {
             LOG.error("Error in getConnection:", e);
@@ -167,8 +166,8 @@ public class MessageProducer extends Configured implements Tool {
             LOG.error("Error in getConnection:", e);
         } finally {
             try {
-                if (falconMessageProducer.connection != null) {
-                    falconMessageProducer.connection.close();
+                if (connection != null) {
+                    connection.close();
                 }
             } catch (JMSException e) {
                 LOG.error("Error in closing connection:", e);

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/messaging/src/test/java/org/apache/falcon/messaging/FalconTopicProducerTest.java
----------------------------------------------------------------------
diff --git a/messaging/src/test/java/org/apache/falcon/messaging/FalconTopicProducerTest.java b/messaging/src/test/java/org/apache/falcon/messaging/FalconTopicProducerTest.java
index da126c7..3f0c664 100644
--- a/messaging/src/test/java/org/apache/falcon/messaging/FalconTopicProducerTest.java
+++ b/messaging/src/test/java/org/apache/falcon/messaging/FalconTopicProducerTest.java
@@ -37,8 +37,6 @@ import javax.jms.*;
 public class FalconTopicProducerTest {
 
     private static final String BROKER_URL = "vm://localhost?broker.useJmx=false&broker.persistent=true";
-    // private static final String BROKER_URL =
-    // "tcp://localhost:61616?daemon=true";
     private static final String BROKER_IMPL_CLASS = "org.apache.activemq.ActiveMQConnectionFactory";
     private static final String TOPIC_NAME = "FALCON.ENTITY.TOPIC";
     private static final String SECONDARY_TOPIC_NAME = "FALCON.ENTITY.SEC.TOPIC";
@@ -140,6 +138,7 @@ public class FalconTopicProducerTest {
     private List<String> createCommonArgs() {
         return new ArrayList<String>(Arrays.asList(
                 "-" + ARG.workflowId.getArgName(), "workflow-01-00",
+                "-" + ARG.workflowUser.getArgName(), "falcon",
                 "-" + ARG.runId.getArgName(), "1",
                 "-" + ARG.nominalTime.getArgName(), "2011-01-01-01-00",
                 "-" + ARG.timeStamp.getArgName(), "2012-01-01-01-00",
@@ -169,6 +168,7 @@ public class FalconTopicProducerTest {
             }
         };
         t.start();
+        Thread.sleep(100);
         for (String[] message : messages) {
             new MessageProducer().run(message);
         }
@@ -203,6 +203,8 @@ public class FalconTopicProducerTest {
                 "agg-coord");
         Assert.assertEquals(m.getString(ARG.workflowId.getArgName()),
                 "workflow-01-00");
+        Assert.assertEquals(m.getString(ARG.workflowUser.getArgName()),
+                "falcon");
         Assert.assertEquals(m.getString(ARG.runId.getArgName()), "1");
         Assert.assertEquals(m.getString(ARG.nominalTime.getArgName()),
                 "2011-01-01T01:00Z");

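The added Thread.sleep(100) gives the consumer thread time to establish its topic subscription before the first message is published; a non-durable topic subscriber that connects late silently misses messages. A more deterministic alternative (a sketch of an option, not what the patch does) is to have the consumer signal readiness, e.g. with a CountDownLatch inside a test method declared throws Exception:

    import java.util.concurrent.CountDownLatch;

    final CountDownLatch subscribed = new CountDownLatch(1);
    Thread t = new Thread() {
        @Override
        public void run() {
            // ... create connection, session and topic consumer here ...
            subscribed.countDown();  // the subscription now exists
            // ... receive and verify messages ...
        }
    };
    t.start();
    subscribed.await();  // replaces the fixed 100 ms sleep
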
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/messaging/src/test/java/org/apache/falcon/messaging/FeedProducerTest.java
----------------------------------------------------------------------
diff --git a/messaging/src/test/java/org/apache/falcon/messaging/FeedProducerTest.java b/messaging/src/test/java/org/apache/falcon/messaging/FeedProducerTest.java
index e707567..57ccdc5 100644
--- a/messaging/src/test/java/org/apache/falcon/messaging/FeedProducerTest.java
+++ b/messaging/src/test/java/org/apache/falcon/messaging/FeedProducerTest.java
@@ -73,6 +73,7 @@ public class FeedProducerTest {
                             "-" + ARG.feedInstancePaths.getArgName(),
                             "/click-logs/10/05/05/00/20",
                             "-" + ARG.workflowId.getArgName(), "workflow-01-00",
+                            "-" + ARG.workflowUser.getArgName(), "falcon",
                             "-" + ARG.runId.getArgName(), "1",
                             "-" + ARG.nominalTime.getArgName(), "2011-01-01-01-00",
                             "-" + ARG.timeStamp.getArgName(), "2012-01-01-01-00",
@@ -200,6 +201,8 @@ public class FeedProducerTest {
         Assert.assertEquals(m.getString(ARG.operation.getArgName()), "DELETE");
         Assert.assertEquals(m.getString(ARG.workflowId.getArgName()),
                 "workflow-01-00");
+        Assert.assertEquals(m.getString(ARG.workflowUser.getArgName()),
+                "falcon");
         Assert.assertEquals(m.getString(ARG.runId.getArgName()), "1");
         Assert.assertEquals(m.getString(ARG.nominalTime.getArgName()),
                 "2011-01-01T01:00Z");

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/messaging/src/test/java/org/apache/falcon/messaging/ProcessProducerTest.java
----------------------------------------------------------------------
diff --git a/messaging/src/test/java/org/apache/falcon/messaging/ProcessProducerTest.java b/messaging/src/test/java/org/apache/falcon/messaging/ProcessProducerTest.java
index 3a40e76..9f8b07f 100644
--- a/messaging/src/test/java/org/apache/falcon/messaging/ProcessProducerTest.java
+++ b/messaging/src/test/java/org/apache/falcon/messaging/ProcessProducerTest.java
@@ -35,8 +35,6 @@ public class ProcessProducerTest {
 
     private String[] args;
     private static final String BROKER_URL = "vm://localhost?broker.useJmx=false&broker.persistent=true";
-    // private static final String BROKER_URL =
-    // "tcp://localhost:61616?daemon=true";
     private static final String BROKER_IMPL_CLASS = "org.apache.activemq.ActiveMQConnectionFactory";
     private static final String TOPIC_NAME = "FALCON.PROCESS";
     private BrokerService broker;
@@ -50,6 +48,7 @@ public class ProcessProducerTest {
                             "-" + ARG.feedInstancePaths.getArgName(),
                             "/click-logs/10/05/05/00/20,/raw-logs/10/05/05/00/20",
                             "-" + ARG.workflowId.getArgName(), "workflow-01-00",
+                            "-" + ARG.workflowUser.getArgName(), "falcon",
                             "-" + ARG.runId.getArgName(), "1",
                             "-" + ARG.nominalTime.getArgName(), "2011-01-01-01-00",
                             "-" + ARG.timeStamp.getArgName(), "2012-01-01-01-00",
@@ -139,6 +138,8 @@ public class ProcessProducerTest {
                 TOPIC_NAME);
         Assert.assertEquals(m.getString(ARG.workflowId.getArgName()),
                 "workflow-01-00");
+        Assert.assertEquals(m.getString(ARG.workflowUser.getArgName()),
+                "falcon");
         Assert.assertEquals(m.getString(ARG.runId.getArgName()), "1");
         Assert.assertEquals(m.getString(ARG.nominalTime.getArgName()),
                 "2011-01-01T01:00Z");

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/metrics/src/main/java/org/apache/falcon/aspect/GenericAlert.java
----------------------------------------------------------------------
diff --git a/metrics/src/main/java/org/apache/falcon/aspect/GenericAlert.java b/metrics/src/main/java/org/apache/falcon/aspect/GenericAlert.java
index 275a725..0b680ba 100644
--- a/metrics/src/main/java/org/apache/falcon/aspect/GenericAlert.java
+++ b/metrics/src/main/java/org/apache/falcon/aspect/GenericAlert.java
@@ -38,6 +38,7 @@ public final class GenericAlert {
             @Dimension(value = "entity-name") String entityName,
             @Dimension(value = "nominal-name") String nominalTime,
             @Dimension(value = "wf-id") String wfId,
+            @Dimension(value = "wf-user") String workflowUser,
             @Dimension(value = "run-id") String runId,
             @Dimension(value = "error-message") String message) {
         return "IGNORE";
@@ -49,6 +50,7 @@ public final class GenericAlert {
             @Dimension(value = "entity-name") String entityName,
             @Dimension(value = "nominal-name") String nominalTime,
             @Dimension(value = "wf-id") String wfId,
+            @Dimension(value = "wf-user") String workflowUser,
             @Dimension(value = "run-id") String runId,
             @Dimension(value = "error-message") String message) {
         return "IGNORE";
@@ -62,6 +64,7 @@ public final class GenericAlert {
             @Dimension(value = "entity-name") String entityName,
             @Dimension(value = "nominal-time") String nominalTime,
             @Dimension(value = "wf-id") String workflowId,
+            @Dimension(value = "wf-user") String workflowUser,
             @Dimension(value = "run-id") String runId,
             @Dimension(value = "operation") String operation,
             @Dimension(value = "start-time") String startTime,
@@ -80,6 +83,7 @@ public final class GenericAlert {
             @Dimension(value = "entity-name") String entityName,
             @Dimension(value = "nominal-time") String nominalTime,
             @Dimension(value = "wf-id") String workflowId,
+            @Dimension(value = "wf-user") String workflowUser,
             @Dimension(value = "run-id") String runId,
             @Dimension(value = "operation") String operation,
             @Dimension(value = "start-time") String startTime,

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/oozie/src/main/java/org/apache/falcon/converter/AbstractOozieEntityMapper.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/converter/AbstractOozieEntityMapper.java b/oozie/src/main/java/org/apache/falcon/converter/AbstractOozieEntityMapper.java
index cecdeef..0762514 100644
--- a/oozie/src/main/java/org/apache/falcon/converter/AbstractOozieEntityMapper.java
+++ b/oozie/src/main/java/org/apache/falcon/converter/AbstractOozieEntityMapper.java
@@ -30,6 +30,7 @@ import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.cluster.Property;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.messaging.EntityInstanceMessage.ARG;
 import org.apache.falcon.oozie.bundle.BUNDLEAPP;
 import org.apache.falcon.oozie.bundle.COORDINATOR;
@@ -37,6 +38,7 @@ import org.apache.falcon.oozie.coordinator.COORDINATORAPP;
 import org.apache.falcon.oozie.coordinator.ObjectFactory;
 import org.apache.falcon.oozie.workflow.ACTION;
 import org.apache.falcon.oozie.workflow.WORKFLOWAPP;
+import org.apache.falcon.security.SecurityUtil;
 import org.apache.falcon.service.FalconPathFilter;
 import org.apache.falcon.service.SharedLibraryHostingService;
 import org.apache.falcon.util.RuntimeProperties;
@@ -45,8 +47,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.log4j.Logger;
 import org.apache.oozie.client.OozieClient;
 
@@ -141,7 +145,7 @@ public abstract class AbstractOozieEntityMapper<T extends Entity> {
             Path coordPath = getCoordPath(bundlePath, coordinatorapp.getName());
             String coordXmlName = marshal(cluster, coordinatorapp, coordPath,
                     EntityUtil.getWorkflowNameSuffix(coordinatorapp.getName(), entity));
-            createTempDir(cluster, coordPath);
+            createLogsDir(cluster, coordPath);
             COORDINATOR bundleCoord = new COORDINATOR();
             bundleCoord.setName(coordinatorapp.getName());
             bundleCoord.setAppPath(getStoragePath(coordPath) + "/" + coordXmlName);
@@ -192,9 +196,9 @@ public abstract class AbstractOozieEntityMapper<T extends Entity> {
     }
 
     protected void addLibExtensionsToWorkflow(Cluster cluster, WORKFLOWAPP wf, EntityType type, String lifecycle)
-        throws IOException {
+        throws IOException, FalconException {
         String libext = ClusterHelper.getLocation(cluster, "working") + "/libext";
-        FileSystem fs = FileSystem.get(ClusterHelper.getConfiguration(cluster));
+        FileSystem fs = HadoopClientFactory.get().createProxiedFileSystem(ClusterHelper.getConfiguration(cluster));
         addExtensionJars(fs, new Path(libext), wf);
         addExtensionJars(fs, new Path(libext, type.name()), wf);
         if (StringUtils.isNotEmpty(lifecycle)) {
@@ -208,7 +212,6 @@ public abstract class AbstractOozieEntityMapper<T extends Entity> {
             SharedLibraryHostingService.pushLibsToHDFS(StartupProperties.get().getProperty("system.lib.location"),
                     libPath, cluster, FALCON_JAR_FILTER);
         } catch (IOException e) {
-            LOG.error("Failed to copy shared libs on cluster " + cluster.getName(), e);
             throw new FalconException("Failed to copy shared libs on cluster " + cluster.getName(), e);
         }
     }
@@ -286,11 +289,11 @@ public abstract class AbstractOozieEntityMapper<T extends Entity> {
 
     protected void marshal(Cluster cluster, JAXBElement<?> jaxbElement, JAXBContext jaxbContext, Path outPath)
         throws FalconException {
-
         try {
             Marshaller marshaller = jaxbContext.createMarshaller();
             marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
-            FileSystem fs = outPath.getFileSystem(ClusterHelper.getConfiguration(cluster));
+            FileSystem fs = HadoopClientFactory.get().createFileSystem(
+                    outPath.toUri(), ClusterHelper.getConfiguration(cluster));
             OutputStream out = fs.create(outPath);
             try {
                 marshaller.marshal(jaxbElement, out);
@@ -310,12 +313,16 @@ public abstract class AbstractOozieEntityMapper<T extends Entity> {
         }
     }
 
-    private void createTempDir(Cluster cluster, Path coordPath) throws FalconException {
+    private void createLogsDir(Cluster cluster, Path coordPath) throws FalconException {
         try {
-            FileSystem fs = coordPath.getFileSystem(ClusterHelper.getConfiguration(cluster));
-            Path tempDir = new Path(coordPath, "../../logs");
-            fs.mkdirs(tempDir);
-            fs.setPermission(tempDir, new FsPermission((short) 511));
+            FileSystem fs = HadoopClientFactory.get().createFileSystem(
+                    coordPath.toUri(), ClusterHelper.getConfiguration(cluster));
+            Path logsDir = new Path(coordPath, "../../logs");
+            fs.mkdirs(logsDir);
+
+            // logs are copied within oozie as the user in post-processing, hence the 777 permissions
+            FsPermission permission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL);
+            fs.setPermission(logsDir, permission);
         } catch (Exception e) {
             throw new FalconException("Unable to create temp dir in " + coordPath, e);
         }
@@ -334,8 +341,7 @@ public abstract class AbstractOozieEntityMapper<T extends Entity> {
 
         marshal(cluster, new org.apache.falcon.oozie.bundle.ObjectFactory().createBundleApp(bundle),
                 BUNDLE_JAXB_CONTEXT,
-                new Path(
-                        outPath, "bundle.xml"));
+                new Path(outPath, "bundle.xml"));
     }
 
     protected void marshal(Cluster cluster, WORKFLOWAPP workflow, Path outPath) throws FalconException {
@@ -394,11 +400,17 @@ public abstract class AbstractOozieEntityMapper<T extends Entity> {
     }
 
     protected void createHiveConf(FileSystem fs, Path confPath, String metastoreUrl,
-                                  String prefix) throws IOException {
+                                  Cluster cluster, String prefix) throws IOException {
         Configuration hiveConf = new Configuration(false);
         hiveConf.set(HiveConf.ConfVars.METASTOREURIS.varname, metastoreUrl);
         hiveConf.set("hive.metastore.local", "false");
 
+        if (UserGroupInformation.isSecurityEnabled()) {
+            hiveConf.set(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname,
+                    ClusterHelper.getPropertyValue(cluster, SecurityUtil.HIVE_METASTORE_PRINCIPAL));
+            hiveConf.set(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname, "true");
+        }
+
         OutputStream out = null;
         try {
             out = fs.create(new Path(confPath, prefix + "hive-site.xml"));

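In createLogsDir above, the permission rewrite is purely cosmetic: decimal 511 is octal 0777, so the old new FsPermission((short) 511) and the new FsAction-based form denote the same rwxrwxrwx mode. A small equivalence check:

    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.fs.permission.FsPermission;

    FsPermission numeric  = new FsPermission((short) 511);  // 511 decimal == 0777 octal
    FsPermission explicit = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL);
    assert numeric.equals(explicit);  // both mean rwxrwxrwx
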
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/oozie/src/main/java/org/apache/falcon/logging/LogMover.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/logging/LogMover.java b/oozie/src/main/java/org/apache/falcon/logging/LogMover.java
index d544311..92b90e7 100644
--- a/oozie/src/main/java/org/apache/falcon/logging/LogMover.java
+++ b/oozie/src/main/java/org/apache/falcon/logging/LogMover.java
@@ -46,7 +46,7 @@ import java.net.URLConnection;
 import java.util.List;
 
 /**
- * Utitlity called in the post process of oozie workflow to move oozie action executor log.
+ * Utility called in the post-processing step of an Oozie workflow to move the Oozie action executor log.
  */
 public class LogMover extends Configured implements Tool {
 
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java b/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java
index 48d4589..0c338da 100644
--- a/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java
+++ b/oozie/src/main/java/org/apache/falcon/logging/LogProvider.java
@@ -25,6 +25,7 @@ import org.apache.falcon.entity.store.ConfigurationStore;
 import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.resource.InstancesResult.Instance;
 import org.apache.falcon.resource.InstancesResult.InstanceAction;
 import org.apache.hadoop.conf.Configuration;
@@ -48,15 +49,12 @@ public final class LogProvider {
 
         Cluster clusterObj = ConfigurationStore.get().get(
                 EntityType.CLUSTER, instance.cluster);
-        String resolvedRunId = "-";
         try {
-            FileSystem fs = FileSystem.get(
-                    new Path(ClusterHelper.getStorageUrl(clusterObj)).toUri(),
-                    new Configuration());
-            resolvedRunId = getResolvedRunId(fs, clusterObj, entity, instance,
-                    runId);
-            // if runId param is not resolved, i.e job is killed or not started
-            // or running
+            Configuration conf = ClusterHelper.getConfiguration(clusterObj);
+            // fs on behalf of the end user.
+            FileSystem fs = HadoopClientFactory.get().createFileSystem(conf);
+            String resolvedRunId = getResolvedRunId(fs, clusterObj, entity, instance, runId);
+            // if runId param is not resolved, i.e. the job was killed, never started, or is still running
             if (resolvedRunId.equals("-")
                     && StringUtils.isEmpty(instance.logFile)) {
                 instance.logFile = "-";
@@ -107,13 +105,13 @@ public final class LogProvider {
     }
 
     private Instance populateActionLogUrls(FileSystem fs, Cluster cluster,
-                                           Entity entity, Instance instance, String formatedRunId)
+                                           Entity entity, Instance instance, String formattedRunId)
         throws FalconException, OozieClientException, IOException {
 
         Path actionPaths = new Path(ClusterHelper.getStorageUrl(cluster),
                 EntityUtil.getLogPath(cluster, entity) + "/job-"
                         + EntityUtil.fromUTCtoURIDate(instance.instance) + "/"
-                        + formatedRunId + "/*");
+                        + formattedRunId + "/*");
         FileStatus[] actions = fs.globStatus(actionPaths);
         InstanceAction[] instanceActions = new InstanceAction[actions.length - 1];
         instance.actions = instanceActions;
@@ -124,7 +122,7 @@ public final class LogProvider {
                     ClusterHelper.getStorageUrl(cluster),
                     EntityUtil.getLogPath(cluster, entity) + "/job-"
                             + EntityUtil.fromUTCtoURIDate(instance.instance) + "/"
-                            + formatedRunId, file.getPath().getName());
+                            + formattedRunId, file.getPath().getName());
             if (filePath.getName().equals("oozie.log")) {
                 instance.logFile = dfsBrowserUrl;
                 continue;

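A recurring move in this patch is replacing direct FileSystem.get(...) calls with the HadoopClientFactory singleton, which centralizes how Falcon authenticates to HDFS. Two call shapes appear in this commit; which identity each maps to (Falcon's login user vs. a proxied end user) is suggested by the method names rather than spelled out here:

    // Both factory methods appear elsewhere in this commit; "proxied"
    // presumably means the call is made as the end user via doAs.
    FileSystem fs        = HadoopClientFactory.get().createFileSystem(conf);
    FileSystem proxiedFs = HadoopClientFactory.get().createProxiedFileSystem(conf);
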
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java b/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java
index 37f8cfa..63c16ad 100644
--- a/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java
+++ b/oozie/src/main/java/org/apache/falcon/service/SharedLibraryHostingService.java
@@ -26,6 +26,7 @@ import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.falcon.entity.v0.cluster.Interfacetype;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.util.StartupProperties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -94,14 +95,13 @@ public class SharedLibraryHostingService implements ConfigurationChangeListener
         }
 
         LOG.debug("Copying libs from " + src);
-        Configuration conf = ClusterHelper.getConfiguration(cluster);
-        conf.setInt("ipc.client.connect.max.retries", 10);
-        FileSystem fs = null;
+        FileSystem fs;
         try {
-            fs = FileSystem.get(conf);
+            fs = getFileSystem(cluster);
+            fs.getConf().set("dfs.umaskmode", "022");  // drwxr-xr-x
         } catch (Exception e) {
             throw new FalconException("Unable to connect to HDFS: "
-                    + ClusterHelper.getStorageUrl(cluster));
+                    + ClusterHelper.getStorageUrl(cluster), e);
         }
         if (!fs.exists(target)) {
             fs.mkdirs(target);
@@ -137,6 +137,15 @@ public class SharedLibraryHostingService implements ConfigurationChangeListener
         }
     }
 
+    // the dir is owned by Falcon but world-readable
+    private static FileSystem getFileSystem(Cluster cluster)
+        throws FalconException, IOException {
+        Configuration conf = ClusterHelper.getConfiguration(cluster);
+        conf.setInt("ipc.client.connect.max.retries", 10);
+
+        return HadoopClientFactory.get().createFileSystem(conf);
+    }
+
     @Override
     public void onAdd(Entity entity, boolean ignoreFailure) throws FalconException {
         if (entity.getEntityType() != EntityType.CLUSTER) {

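Setting dfs.umaskmode to 022 makes newly created directories come out as 0755, the drwxr-xr-x noted in the comment: the default directory mode 0777 is ANDed with the complement of the umask. The arithmetic:

    int umask   = 0022;
    int dirMode = 0777 & ~umask;                         // = 0755, i.e. drwxr-xr-x
    System.out.println(Integer.toOctalString(dirMode));  // prints 755
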
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java b/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java
index 3f9256c..c19c89c 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/FalconPostProcessing.java
@@ -23,6 +23,7 @@ import org.apache.falcon.logging.LogMover;
 import org.apache.falcon.messaging.MessageProducer;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Logger;
@@ -61,7 +62,8 @@ public class FalconPostProcessing extends Configured implements Tool {
         WF_ENGINE_URL("workflowEngineUrl", "url of workflow engine server, ex:oozie"),
         USER_SUBFLOW_ID("subflowId", "external id of user workflow"),
         USER_WORKFLOW_ENGINE("userWorkflowEngine", "user workflow engine type"),
-        LOG_DIR("logDir", "log dir where job logs are copied");
+        LOG_DIR("logDir", "log dir where job logs are copied"),
+        WORKFLOW_USER("workflowUser", "user who owns the feed instance (partition)");
 
         private String name;
         private String description;
@@ -96,7 +98,7 @@ public class FalconPostProcessing extends Configured implements Tool {
         LOG.info("Sending user message " + cmd);
         invokeUserMessageProducer(cmd);
 
-        //LogMover doesnt throw exception, a failed logmover will not fail the user workflow
+        //LogMover doesn't throw exception, a failed log mover will not fail the user workflow
         LOG.info("Moving logs " + cmd);
         invokeLogProducer(cmd);
 
@@ -155,11 +157,18 @@ public class FalconPostProcessing extends Configured implements Tool {
         addArg(args, cmd, Arg.FEED_NAMES);
         addArg(args, cmd, Arg.FEED_INSTANCE_PATHS);
         addArg(args, cmd, Arg.LOG_FILE);
+        addArg(args, cmd, Arg.WORKFLOW_USER);
 
         MessageProducer.main(args.toArray(new String[0]));
     }
 
     private void invokeLogProducer(CommandLine cmd) throws Exception {
+        // todo: need to move this out to Falcon in-process
+        if (UserGroupInformation.isSecurityEnabled()) {
+            LOG.info("Unable to move logs as security is enabled.");
+            return;
+        }
+
         List<String> args = new ArrayList<String>();
         addArg(args, cmd, Arg.WF_ENGINE_URL);
         addArg(args, cmd, Arg.ENTITY_TYPE);
@@ -204,6 +213,8 @@ public class FalconPostProcessing extends Configured implements Tool {
         addOption(options, Arg.USER_SUBFLOW_ID);
         addOption(options, Arg.USER_WORKFLOW_ENGINE, false);
         addOption(options, Arg.LOG_DIR);
+        addOption(options, Arg.WORKFLOW_USER);
+
         return new GnuParser().parse(options, arguments);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java
index b757531..d6dd2af 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieClientFactory.java
@@ -24,8 +24,7 @@ import org.apache.falcon.entity.store.ConfigurationStore;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
 import org.apache.log4j.Logger;
-import org.apache.oozie.client.CustomOozieClient;
-import org.apache.oozie.client.OozieClient;
+import org.apache.oozie.client.ProxyOozieClient;
 
 import java.util.concurrent.ConcurrentHashMap;
 
@@ -35,42 +34,43 @@ import java.util.concurrent.ConcurrentHashMap;
 public final class OozieClientFactory {
 
     private static final Logger LOG = Logger.getLogger(OozieClientFactory.class);
-
-    private static final ConcurrentHashMap<String, OozieClient> CACHE =
-        new ConcurrentHashMap<String, OozieClient>();
     private static final String LOCAL_OOZIE = "local";
+
+    private static final ConcurrentHashMap<String, ProxyOozieClient> CACHE =
+            new ConcurrentHashMap<String, ProxyOozieClient>();
     private static volatile boolean localInitialized = false;
 
     private OozieClientFactory() {}
 
-    public static synchronized OozieClient get(Cluster cluster)
+    public static synchronized ProxyOozieClient get(Cluster cluster)
         throws FalconException {
 
         assert cluster != null : "Cluster cant be null";
         String oozieUrl = ClusterHelper.getOozieUrl(cluster);
         if (!CACHE.containsKey(oozieUrl)) {
-            OozieClient ref = getClientRef(oozieUrl);
+            ProxyOozieClient ref = getClientRef(oozieUrl);
             LOG.info("Caching Oozie client object for " + oozieUrl);
             CACHE.putIfAbsent(oozieUrl, ref);
         }
+
         return CACHE.get(oozieUrl);
     }
 
-    public static OozieClient get(String cluster) throws FalconException {
-        return get((Cluster) ConfigurationStore.get().get(EntityType.CLUSTER, cluster));
+    public static ProxyOozieClient get(String clusterName) throws FalconException {
+        return get((Cluster) ConfigurationStore.get().get(EntityType.CLUSTER, clusterName));
     }
 
-    private static OozieClient getClientRef(String oozieUrl)
+    private static ProxyOozieClient getClientRef(String oozieUrl)
         throws FalconException {
 
         if (LOCAL_OOZIE.equals(oozieUrl)) {
             return getLocalOozieClient();
         } else {
-            return new CustomOozieClient(oozieUrl);
+            return new ProxyOozieClient(oozieUrl);
         }
     }
 
-    private static OozieClient getLocalOozieClient() throws FalconException {
+    private static ProxyOozieClient getLocalOozieClient() throws FalconException {
         try {
             if (!localInitialized) {
                 //LocalOozie.start();

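get() above is both synchronized and guarded by containsKey before putIfAbsent, which is belt and braces on a ConcurrentHashMap. A lock-free equivalent (a sketch of an alternative, not what the patch does):

    ProxyOozieClient client = CACHE.get(oozieUrl);
    if (client == null) {
        ProxyOozieClient candidate = getClientRef(oozieUrl);
        client = CACHE.putIfAbsent(oozieUrl, candidate);
        if (client == null) {
            client = candidate;  // our candidate won the race
        }
    }
    return client;
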
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java
index 068e980..a930bb7 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieHouseKeepingService.java
@@ -24,6 +24,7 @@ import org.apache.falcon.entity.EntityUtil;
 import org.apache.falcon.entity.v0.Entity;
 import org.apache.falcon.entity.v0.EntityType;
 import org.apache.falcon.entity.v0.cluster.Cluster;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -56,7 +57,7 @@ public class OozieHouseKeepingService implements WorkflowEngineActionListener {
             LOG.info("Deleting entity path " + entityPath + " on cluster " + clusterName);
 
             Configuration conf = ClusterHelper.getConfiguration(cluster);
-            FileSystem fs = FileSystem.get(conf);
+            FileSystem fs = HadoopClientFactory.get().createFileSystem(conf);
             if (fs.exists(entityPath) && !fs.delete(entityPath, true)) {
                 throw new FalconException("Unable to cleanup entity path: " + entityPath);
             }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
index 71ff430..cea73bd 100644
--- a/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
+++ b/oozie/src/main/java/org/apache/falcon/workflow/engine/OozieWorkflowEngine.java
@@ -27,6 +27,7 @@ import org.apache.falcon.entity.store.ConfigurationStore;
 import org.apache.falcon.entity.v0.*;
 import org.apache.falcon.entity.v0.Frequency.TimeUnit;
 import org.apache.falcon.entity.v0.cluster.Cluster;
+import org.apache.falcon.hadoop.HadoopClientFactory;
 import org.apache.falcon.resource.APIResult;
 import org.apache.falcon.resource.InstancesResult;
 import org.apache.falcon.resource.InstancesResult.Instance;
@@ -54,8 +55,7 @@ import java.util.Map.Entry;
  */
 public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
-    private static final Logger LOG = Logger
-            .getLogger(OozieWorkflowEngine.class);
+    private static final Logger LOG = Logger.getLogger(OozieWorkflowEngine.class);
 
     public static final String ENGINE = "oozie";
     private static final BundleJob MISSING = new NullBundleJob();
@@ -130,8 +130,8 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
     private void commitStagingPath(String cluster, String path) throws FalconException {
         path = StringUtils.removeStart(path, "${nameNode}");
-        FileSystem fs =
-                ClusterHelper.getFileSystem((Cluster) ConfigurationStore.get().get(EntityType.CLUSTER, cluster));
+        Cluster clusterEntity = ConfigurationStore.get().get(EntityType.CLUSTER, cluster);
+        FileSystem fs = HadoopClientFactory.get().createFileSystem(ClusterHelper.getConfiguration(clusterEntity));
         try {
             fs.create(new Path(path, EntityUtil.SUCCEEDED_FILE_NAME)).close();
         } catch (IOException e) {
@@ -187,10 +187,10 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     }
 
     //Return all bundles for the entity in the requested cluster
-    private List<BundleJob> findBundles(Entity entity, String cluster) throws FalconException {
+    private List<BundleJob> findBundles(Entity entity, String clusterName) throws FalconException {
+
         try {
-            OozieClient client = OozieClientFactory.get(cluster);
-            List<BundleJob> jobs = client.getBundleJobsInfo(
+            List<BundleJob> jobs = OozieClientFactory.get(clusterName).getBundleJobsInfo(
                     OozieClient.FILTER_NAME + "=" + EntityUtil.getWorkflowName(entity) + ";", 0, 256);
             if (jobs != null) {
                 List<BundleJob> filteredJobs = new ArrayList<BundleJob>();
@@ -317,22 +317,23 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         return "SUCCESS";
     }
 
-    private void killBundle(String cluster, BundleJob job) throws FalconException {
-        OozieClient client = OozieClientFactory.get(cluster);
+    private void killBundle(String clusterName, BundleJob job) throws FalconException {
+        ProxyOozieClient client = OozieClientFactory.get(clusterName);
         try {
             //kill all coords
             for (CoordinatorJob coord : job.getCoordinators()) {
                 client.kill(coord.getId());
-                LOG.debug("Killed coord " + coord.getId() + " on cluster " + cluster);
+                LOG.debug("Killed coord " + coord.getId() + " on cluster " + clusterName);
             }
 
             //set end time of bundle
-            client.change(job.getId(), OozieClient.CHANGE_VALUE_ENDTIME + "=" + SchemaHelper.formatDateUTC(new Date()));
-            LOG.debug("Changed end time of bundle " + job.getId() + " on cluster " + cluster);
+            client.change(job.getId(),
+                    OozieClient.CHANGE_VALUE_ENDTIME + "=" + SchemaHelper.formatDateUTC(new Date()));
+            LOG.debug("Changed end time of bundle " + job.getId() + " on cluster " + clusterName);
 
             //kill bundle
             client.kill(job.getId());
-            LOG.debug("Killed bundle " + job.getId() + " on cluster " + cluster);
+            LOG.debug("Killed bundle " + job.getId() + " on cluster " + clusterName);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -383,7 +384,6 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     @Override
     public InstancesResult getRunningInstances(Entity entity)
         throws FalconException {
-
         try {
             WorkflowBuilder<Entity> builder = WorkflowBuilder.getBuilder(
                     ENGINE, entity);
@@ -400,7 +400,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
             }
 
             for (String cluster : clusters) {
-                OozieClient client = OozieClientFactory.get(cluster);
+                ProxyOozieClient client = OozieClientFactory.get(cluster);
                 List<WorkflowJob> wfs = getRunningWorkflows(cluster, coordNames);
                 if (wfs != null) {
                     for (WorkflowJob job : wfs) {
@@ -476,10 +476,8 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
     private WorkflowJob getWorkflowInfo(String cluster, String wfId)
         throws FalconException {
-
-        OozieClient client = OozieClientFactory.get(cluster);
         try {
-            return client.getJobInfo(wfId);
+            return OozieClientFactory.get(cluster).getJobInfo(wfId);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -556,7 +554,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
             }
 
             List<BundleJob> bundles = entry.getValue();
-            OozieClient client = OozieClientFactory.get(cluster);
+            ProxyOozieClient client = OozieClientFactory.get(cluster);
             List<CoordinatorJob> applicableCoords = getApplicableCoords(entity, client, start, end, bundles);
             long unscheduledInstances = 0;
             boolean isLastCoord = false;
@@ -725,18 +723,14 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                  + Arrays.toString(statuses));
     }
 
-    private String getSourceCluster(String cluster,
-                                    CoordinatorAction coordinatorAction, Entity entity)
+    private String getSourceCluster(String cluster, CoordinatorAction coordinatorAction, Entity entity)
         throws FalconException {
-
-        OozieClient client = OozieClientFactory.get(cluster);
-        CoordinatorJob coordJob;
         try {
-            coordJob = client.getCoordJobInfo(coordinatorAction.getJobId());
+            CoordinatorJob coordJob = OozieClientFactory.get(cluster).getCoordJobInfo(coordinatorAction.getJobId());
+            return EntityUtil.getWorkflowNameSuffix(coordJob.getAppName(), entity);
         } catch (OozieClientException e) {
             throw new FalconException("Unable to get oozie job id:" + e);
         }
-        return EntityUtil.getWorkflowNameSuffix(coordJob.getAppName(), entity);
     }
 
     private List<String> getIncludedClusters(Properties props,
@@ -776,7 +770,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         for (Map.Entry<String, List<BundleJob>> entry : bundlesMap.entrySet()) {
             String cluster = entry.getKey();
             List<BundleJob> bundles = entry.getValue();
-            OozieClient client = OozieClientFactory.get(cluster);
+            ProxyOozieClient client = OozieClientFactory.get(cluster);
             List<CoordinatorJob> applicableCoords = getApplicableCoords(entity, client, start, end, bundles);
             List<CoordinatorAction> actions = new ArrayList<CoordinatorAction>();
 
@@ -836,8 +830,8 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         }
     }
 
-    private List<CoordinatorJob> getApplicableCoords(Entity entity,
-                                                     OozieClient client, Date start, Date end, List<BundleJob> bundles)
+    private List<CoordinatorJob> getApplicableCoords(Entity entity, ProxyOozieClient client,
+                                                     Date start, Date end, List<BundleJob> bundles)
         throws FalconException {
 
         List<CoordinatorJob> applicableCoords = new ArrayList<CoordinatorJob>();
@@ -1113,10 +1107,8 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
     private BundleJob getBundleInfo(String cluster, String bundleId)
         throws FalconException {
-
-        OozieClient client = OozieClientFactory.get(cluster);
         try {
-            return client.getBundleJobInfo(bundleId);
+            return OozieClientFactory.get(cluster).getBundleJobInfo(bundleId);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -1132,9 +1124,8 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
                     .append(wfName);
         }
 
-        OozieClient client = OozieClientFactory.get(cluster);
         try {
-            return client.getJobsInfo(filter.toString(), 1, 1000);
+            return OozieClientFactory.get(cluster).getJobsInfo(filter.toString(), 1, 1000);
         } catch (OozieClientException e) {
             throw new FalconException(e);
         }
@@ -1144,7 +1135,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     public void reRun(String cluster, String jobId, Properties props)
         throws FalconException {
 
-        OozieClient client = OozieClientFactory.get(cluster);
+        ProxyOozieClient client = OozieClientFactory.get(cluster);
         try {
             WorkflowJob jobInfo = client.getJobInfo(jobId);
             Properties jobprops = OozieUtils.toProperties(jobInfo.getConf());
@@ -1200,7 +1191,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     public String getWorkflowStatus(String cluster, String jobId)
         throws FalconException {
 
-        OozieClient client = OozieClientFactory.get(cluster);
+        ProxyOozieClient client = OozieClientFactory.get(cluster);
         try {
             if (jobId.endsWith("-W")) {
                 WorkflowJob jobInfo = client.getJobInfo(jobId);
@@ -1231,9 +1222,8 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     }
 
     private String run(String cluster, Properties props) throws FalconException {
-        OozieClient client = OozieClientFactory.get(cluster);
         try {
-            String jobId = client.run(props);
+            String jobId = OozieClientFactory.get(cluster).run(props);
             LOG.info("Submitted " + jobId + " on cluster " + cluster
                     + " with properties : " + props);
             return jobId;
@@ -1244,9 +1234,8 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     }
 
     private void suspend(String cluster, String jobId) throws FalconException {
-        OozieClient client = OozieClientFactory.get(cluster);
         try {
-            client.suspend(jobId);
+            OozieClientFactory.get(cluster).suspend(jobId);
             assertStatus(cluster, jobId, Status.PREPSUSPENDED, Status.SUSPENDED, Status.SUCCEEDED,
                     Status.FAILED, Status.KILLED);
             LOG.info("Suspended job " + jobId + " on cluster " + cluster);
@@ -1256,9 +1245,8 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     }
 
     private void resume(String cluster, String jobId) throws FalconException {
-        OozieClient client = OozieClientFactory.get(cluster);
         try {
-            client.resume(jobId);
+            OozieClientFactory.get(cluster).resume(jobId);
             assertStatus(cluster, jobId, Status.RUNNING, Status.SUCCEEDED,
                     Status.FAILED, Status.KILLED);
             LOG.info("Resumed job " + jobId + " on cluster " + cluster);
@@ -1268,9 +1256,8 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     }
 
     private void kill(String cluster, String jobId) throws FalconException {
-        OozieClient client = OozieClientFactory.get(cluster);
         try {
-            client.kill(jobId);
+            OozieClientFactory.get(cluster).kill(jobId);
             assertStatus(cluster, jobId, Status.KILLED, Status.SUCCEEDED,
                     Status.FAILED);
             LOG.info("Killed job " + jobId + " on cluster " + cluster);
@@ -1281,10 +1268,8 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
     private void change(String cluster, String jobId, String changeValue)
         throws FalconException {
-
         try {
-            OozieClient client = OozieClientFactory.get(cluster);
-            client.change(jobId, changeValue);
+            OozieClientFactory.get(cluster).change(jobId, changeValue);
             LOG.info("Changed bundle/coord " + jobId + ": " + changeValue
                     + " on cluster " + cluster);
         } catch (OozieClientException e) {
@@ -1317,7 +1302,7 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
         // assert that its really changed
         try {
-            OozieClient client = OozieClientFactory.get(cluster);
+            ProxyOozieClient client = OozieClientFactory.get(cluster);
             CoordinatorJob coord = client.getCoordJobInfo(id);
             for (int counter = 0; counter < 3; counter++) {
                 Date intendedPauseTime = (StringUtils.isEmpty(pauseTime) ? null
@@ -1348,9 +1333,8 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
 
     @Override
     public Properties getWorkflowProperties(String cluster, String jobId) throws FalconException {
-        OozieClient client = OozieClientFactory.get(cluster);
         try {
-            WorkflowJob jobInfo = client.getJobInfo(jobId);
+            WorkflowJob jobInfo = OozieClientFactory.get(cluster).getJobInfo(jobId);
             String conf = jobInfo.getConf();
             return OozieUtils.toProperties(conf);
         } catch (Exception e) {
@@ -1361,12 +1345,10 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
     @Override
     public InstancesResult getJobDetails(String cluster, String jobId)
         throws FalconException {
-
-        OozieClient client = OozieClientFactory.get(cluster);
         Instance[] instances = new Instance[1];
         Instance instance = new Instance();
         try {
-            WorkflowJob jobInfo = client.getJobInfo(jobId);
+            WorkflowJob jobInfo = OozieClientFactory.get(cluster).getJobInfo(jobId);
             instance.startTime = jobInfo.getStartTime();
             if (jobInfo.getStatus().name().equals(Status.RUNNING.name())) {
                 instance.endTime = new Date();
@@ -1380,6 +1362,5 @@ public class OozieWorkflowEngine extends AbstractWorkflowEngine {
         } catch (Exception e) {
             throw new FalconException(e);
         }
-
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/oozie/src/main/java/org/apache/oozie/client/CustomOozieClient.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/oozie/client/CustomOozieClient.java b/oozie/src/main/java/org/apache/oozie/client/CustomOozieClient.java
deleted file mode 100644
index c55221e..0000000
--- a/oozie/src/main/java/org/apache/oozie/client/CustomOozieClient.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.oozie.client;
-
-import org.apache.falcon.security.CurrentUser;
-import org.apache.falcon.util.RuntimeProperties;
-import org.apache.log4j.Logger;
-import org.apache.oozie.client.rest.RestConstants;
-import org.json.simple.JSONObject;
-import org.json.simple.JSONValue;
-
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-/**
- * Wrapped Oozie Client.
- */
-public class CustomOozieClient extends OozieClient {
-
-    private static final Logger LOG = Logger.getLogger(CustomOozieClient.class);
-    private static final Map<String, String> NONE = new HashMap<String, String>();
-
-    public CustomOozieClient(String oozieUrl) {
-        super(oozieUrl);
-    }
-
-    public Properties getConfiguration() throws OozieClientException {
-        return (new OozieConfiguration(RestConstants.ADMIN_CONFIG_RESOURCE)).call();
-    }
-
-    public Properties getProperties() throws OozieClientException {
-        return (new OozieConfiguration(RestConstants.ADMIN_JAVA_SYS_PROPS_RESOURCE)).call();
-    }
-
-    @Override
-    protected HttpURLConnection createConnection(URL url, String method) throws IOException, OozieClientException {
-        String strUrl = url.toString();
-        if (!strUrl.contains(OozieClient.USER_NAME)) { // decorate the url with the user in request
-            String paramSeparator = (strUrl.contains("?")) ? "&" : "?";
-            strUrl += paramSeparator + OozieClient.USER_NAME + "=" + CurrentUser.getUser();
-            url = new URL(strUrl);
-            if (LOG.isDebugEnabled()) {
-                LOG.debug("Decorated url with user info: " + url);
-            }
-        }
-
-        HttpURLConnection conn = super.createConnection(url, method);
-
-        int connectTimeout = Integer.valueOf(RuntimeProperties.get().getProperty("oozie.connect.timeout", "1000"));
-        conn.setConnectTimeout(connectTimeout);
-
-        int readTimeout = Integer.valueOf(RuntimeProperties.get().getProperty("oozie.read.timeout", "45000"));
-        conn.setReadTimeout(readTimeout);
-
-        return conn;
-    }
-
-    private class OozieConfiguration extends ClientCallable<Properties> {
-
-        public OozieConfiguration(String resource) {
-            super("GET", RestConstants.ADMIN, resource, NONE);
-        }
-
-        @Override
-        protected Properties call(HttpURLConnection conn) throws IOException, OozieClientException {
-            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
-            if ((conn.getResponseCode() == HttpURLConnection.HTTP_OK)) {
-                Reader reader = new InputStreamReader(conn.getInputStream(), "UTF_8");
-                JSONObject json = (JSONObject) JSONValue.parse(reader);
-                Properties props = new Properties();
-                props.putAll(json);
-                return props;
-            } else {
-                handleError(conn);
-                return null;
-            }
-        }
-    }
-}

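The deleted CustomOozieClient and the ProxyOozieClient added below differ in whose identity reaches Oozie. The old client put the end user directly on the REST URL; the new one authenticates as Falcon's Kerberos login user and runs each client call under doAs for the end user instead. The contrast, lifted from the two versions:

    // CustomOozieClient (removed): REST user.name = the end user
    strUrl += paramSeparator + OozieClient.USER_NAME + "=" + CurrentUser.getUser();

    // ProxyOozieClient (added below): REST user.name = Falcon's login user;
    // the end user is applied via doAs(CurrentUser.getUser(), ...) per call
    strUrl += paramSeparator + OozieClient.USER_NAME + "="
            + UserGroupInformation.getLoginUser().getUserName();
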
http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java
----------------------------------------------------------------------
diff --git a/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java b/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java
new file mode 100644
index 0000000..c78a83a
--- /dev/null
+++ b/oozie/src/main/java/org/apache/oozie/client/ProxyOozieClient.java
@@ -0,0 +1,562 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.client;
+
+import org.apache.falcon.security.CurrentUser;
+import org.apache.falcon.security.SecurityUtil;
+import org.apache.falcon.util.RuntimeProperties;
+import org.apache.hadoop.hdfs.web.KerberosUgiAuthenticator;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.Authenticator;
+import org.apache.log4j.Logger;
+import org.apache.oozie.client.rest.RestConstants;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.io.Reader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.security.PrivilegedExceptionAction;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.Callable;
+
+/**
+ * Wrapped Oozie Client that proxies requests.
+ */
+public class ProxyOozieClient extends AuthOozieClient {
+
+    private static final Logger LOG = Logger.getLogger(ProxyOozieClient.class);
+    private static final Map<String, String> NONE = new HashMap<String, String>();
+
+    private final Authenticator authenticator = new KerberosUgiAuthenticator();
+
+    public ProxyOozieClient(String oozieUrl) {
+        super(oozieUrl, SecurityUtil.getAuthenticationType());
+
+        if (org.apache.log4j.Logger.getLogger(getClass()).isDebugEnabled()) {
+            setDebugMode(1);
+        }
+    }
+
+    public Properties getConfiguration() throws OozieClientException {
+        return (new OozieConfiguration(RestConstants.ADMIN_CONFIG_RESOURCE)).call();
+    }
+
+    public Properties getProperties() throws OozieClientException {
+        return (new OozieConfiguration(RestConstants.ADMIN_JAVA_SYS_PROPS_RESOURCE)).call();
+    }
+
+    @Override
+    protected Authenticator getAuthenticator() throws OozieClientException {
+        return authenticator;
+    }
+
+    @Override
+    protected HttpURLConnection createConnection(URL url, final String method)
+        throws IOException, OozieClientException {
+
+        final URL decoratedUrl = decorateUrlWithUser(url);
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("ProxyOozieClient.createConnection: u=" + url + ", m=" + method);
+        }
+
+        UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
+        try {
+            return currentUser.doAs(new PrivilegedExceptionAction<HttpURLConnection>() {
+                public HttpURLConnection run() throws Exception {
+                    HttpURLConnection conn = ProxyOozieClient.super.createConnection(decoratedUrl, method);
+
+                    int connectTimeout = Integer.valueOf(
+                            RuntimeProperties.get().getProperty("oozie.connect.timeout", "1000"));
+                    conn.setConnectTimeout(connectTimeout);
+
+                    int readTimeout = Integer.valueOf(
+                            RuntimeProperties.get().getProperty("oozie.read.timeout", "45000"));
+                    conn.setReadTimeout(readTimeout);
+
+                    return conn;
+                }
+            });
+        } catch (InterruptedException e) {
+            throw new IOException("Could not connect to oozie: " + e.getMessage(), e);
+        }
+    }
+
+    protected URL decorateUrlWithUser(URL url) throws IOException {
+        String strUrl = url.toString();
+
+        if (!strUrl.contains(OozieClient.USER_NAME)) {
+            // decorate the url with the proxy user in request
+            String paramSeparator = (strUrl.contains("?")) ? "&" : "?";
+            strUrl += paramSeparator + OozieClient.USER_NAME + "="
+                    + UserGroupInformation.getLoginUser().getUserName();
+            // strUrl += "&" + RestConstants.DO_AS_PARAM + "=" + CurrentUser.getUser();
+
+            url = new URL(strUrl);
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Decorated url with user info: " + url);
+            }
+        }
+
+        return url;
+    }
+
+    private class OozieConfiguration extends ClientCallable<Properties> {
+
+        public OozieConfiguration(String resource) {
+            super("GET", RestConstants.ADMIN, resource, NONE);
+        }
+
+        @Override
+        protected Properties call(HttpURLConnection conn)
+            throws IOException, OozieClientException {
+            conn.setRequestProperty("content-type", RestConstants.XML_CONTENT_TYPE);
+            if ((conn.getResponseCode() == HttpURLConnection.HTTP_OK)) {
+                Reader reader = new InputStreamReader(conn.getInputStream(), "UTF-8");
+                JSONObject json = (JSONObject) JSONValue.parse(reader);
+                Properties props = new Properties();
+                props.putAll(json);
+                return props;
+            } else {
+                handleError(conn);
+                return null;
+            }
+        }
+    }
+
+    @Override
+    public SYSTEM_MODE getSystemMode() throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<SYSTEM_MODE>() {
+
+                public SYSTEM_MODE call() throws Exception {
+                    return ProxyOozieClient.super.getSystemMode();
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public String submit(final Properties conf) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<String>() {
+
+                public String call() throws Exception {
+                    return ProxyOozieClient.super.submit(conf);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public String dryrun(final Properties conf) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<String>() {
+
+                public String call() throws Exception {
+                    return ProxyOozieClient.super.dryrun(conf);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+
+    }
+
+    @Override
+    public void start(final String jobId) throws OozieClientException {
+        try {
+            doAs(CurrentUser.getUser(), new Callable<Object>() {
+
+                public String call() throws Exception {
+                    ProxyOozieClient.super.start(jobId);
+                    return null;
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public String run(final Properties conf) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<String>() {
+
+                public String call() throws Exception {
+                    return ProxyOozieClient.super.run(conf);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public void reRun(final String jobId, final Properties conf) throws OozieClientException {
+        try {
+            doAs(CurrentUser.getUser(), new Callable<Object>() {
+
+                public Object call() throws Exception {
+                    ProxyOozieClient.super.reRun(jobId, conf);
+                    return null;
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public void suspend(final String jobId) throws OozieClientException {
+        try {
+            doAs(CurrentUser.getUser(), new Callable<Object>() {
+
+                public Object call() throws Exception {
+                    ProxyOozieClient.super.suspend(jobId);
+                    return null;
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public void resume(final String jobId) throws OozieClientException {
+        try {
+            doAs(CurrentUser.getUser(), new Callable<Object>() {
+
+                public Object call() throws Exception {
+                    ProxyOozieClient.super.resume(jobId);
+                    return null;
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public void kill(final String jobId) throws OozieClientException {
+        try {
+            doAs(CurrentUser.getUser(), new Callable<Object>() {
+
+                public Object call() throws Exception {
+                    ProxyOozieClient.super.kill(jobId);
+                    return null;
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public void change(final String jobId, final String changeValue) throws OozieClientException {
+        try {
+            doAs(CurrentUser.getUser(), new Callable<Object>() {
+
+                public Object call() throws Exception {
+                    ProxyOozieClient.super.change(jobId, changeValue);
+                    return null;
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public WorkflowJob getJobInfo(final String jobId) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<WorkflowJob>() {
+
+                public WorkflowJob call() throws Exception {
+                    return ProxyOozieClient.super.getJobInfo(jobId);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public WorkflowJob getJobInfo(final String jobId, final int start, final int len)
+        throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<WorkflowJob>() {
+
+                public WorkflowJob call() throws Exception {
+                    return ProxyOozieClient.super.getJobInfo(jobId, start, len);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public WorkflowAction getWorkflowActionInfo(final String actionId)
+        throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<WorkflowAction>() {
+
+                public WorkflowAction call() throws Exception {
+                    return ProxyOozieClient.super.getWorkflowActionInfo(actionId);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public String getJobLog(final String jobId) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<String>() {
+
+                public String call() throws Exception {
+                    return ProxyOozieClient.super.getJobLog(jobId);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public void getJobLog(final String jobId, final String logRetrievalType,
+                          final String logRetrievalScope, final PrintStream ps)
+        throws OozieClientException {
+        try {
+            doAs(CurrentUser.getUser(), new Callable<Object>() {
+
+                public Object call() throws Exception {
+                    ProxyOozieClient.super.getJobLog(jobId, logRetrievalType, logRetrievalScope, ps);
+                    return null;
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public String getJobDefinition(final String jobId) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<String>() {
+
+                public String call() throws Exception {
+                    return ProxyOozieClient.super.getJobDefinition(jobId);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public BundleJob getBundleJobInfo(final String jobId) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<BundleJob>() {
+
+                public BundleJob call() throws Exception {
+                    return ProxyOozieClient.super.getBundleJobInfo(jobId);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public CoordinatorJob getCoordJobInfo(final String jobId) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<CoordinatorJob>() {
+
+                public CoordinatorJob call() throws Exception {
+                    return ProxyOozieClient.super.getCoordJobInfo(jobId);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public CoordinatorJob getCoordJobInfo(final String jobId, final String filter,
+                                          final int start, final int len)
+        throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<CoordinatorJob>() {
+
+                public CoordinatorJob call() throws Exception {
+                    return ProxyOozieClient.super.getCoordJobInfo(jobId, filter, start, len);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public CoordinatorAction getCoordActionInfo(final String actionId) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<CoordinatorAction>() {
+
+                public CoordinatorAction call() throws Exception {
+                    return ProxyOozieClient.super.getCoordActionInfo(actionId);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public List<CoordinatorAction> reRunCoord(final String jobId, final String rerunType,
+                                              final String scope, final boolean refresh,
+                                              final boolean noCleanup)
+        throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<List<CoordinatorAction>>() {
+
+                public List<CoordinatorAction> call() throws Exception {
+                    return ProxyOozieClient.super.reRunCoord(jobId, rerunType, scope, refresh, noCleanup);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public Void reRunBundle(final String jobId, final String coordScope, final String dateScope,
+                            final boolean refresh, final boolean noCleanup)
+        throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<Void>() {
+
+                public Void call() throws Exception {
+                    return ProxyOozieClient.super.reRunBundle(jobId, coordScope, dateScope, refresh, noCleanup);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public List<WorkflowJob> getJobsInfo(final String filter, final int start, final int len)
+        throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<List<WorkflowJob>>() {
+
+                public List<WorkflowJob> call() throws Exception {
+                    return ProxyOozieClient.super.getJobsInfo(filter, start, len);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public List<WorkflowJob> getJobsInfo(final String filter) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<List<WorkflowJob>>() {
+
+                public List<WorkflowJob> call() throws Exception {
+                    return ProxyOozieClient.super.getJobsInfo(filter);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public void getSlaInfo(final int start, final int len, final String filter) throws OozieClientException {
+        try {
+            doAs(CurrentUser.getUser(), new Callable<Object>() {
+
+                public Object call() throws Exception {
+                    ProxyOozieClient.super.getSlaInfo(start, len, filter);
+                    return null;
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public String getJobId(final String externalId) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<String>() {
+
+                public String call() throws Exception {
+                    return ProxyOozieClient.super.getJobId(externalId);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public List<CoordinatorJob> getCoordJobsInfo(final String filter, final int start,
+                                                 final int len) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<List<CoordinatorJob>>() {
+
+                public List<CoordinatorJob> call() throws Exception {
+                    return ProxyOozieClient.super.getCoordJobsInfo(filter, start, len);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+
+    @Override
+    public List<BundleJob> getBundleJobsInfo(final String filter, final int start,
+                                             final int len) throws OozieClientException {
+        try {
+            return doAs(CurrentUser.getUser(), new Callable<List<BundleJob>>() {
+                public List<BundleJob> call() throws Exception {
+                    return ProxyOozieClient.super.getBundleJobsInfo(filter, start, len);
+                }
+            });
+        } catch (Exception e) {
+            throw new OozieClientException(OozieClientException.AUTHENTICATION, e);
+        }
+    }
+}
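Every public OozieClient operation above is wrapped in doAs(CurrentUser.getUser(), ...), so the call executes as the user who issued the Falcon request rather than as the Falcon service user. The doAs helper itself is not part of this hunk; the InterruptedException caught in createConnection suggests it rests on Hadoop's UserGroupInformation proxy-user API. A minimal sketch under that assumption (class and member names here are illustrative, not Falcon's):

    import java.security.PrivilegedExceptionAction;
    import java.util.concurrent.Callable;

    import org.apache.hadoop.security.UserGroupInformation;

    public final class ProxyUserSketch {
        // Runs 'callable' as 'user', impersonated on top of the service's login user.
        static <T> T doAs(String user, final Callable<T> callable) throws Exception {
            UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser(
                    user, UserGroupInformation.getLoginUser());
            return proxyUgi.doAs(new PrivilegedExceptionAction<T>() {
                @Override
                public T run() throws Exception {
                    return callable.call();
                }
            });
        }
    }

For impersonation to be honored end to end, the Falcon service principal typically also has to be whitelisted as a proxy user on the Oozie side.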

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/oozie/src/test/java/org/apache/falcon/oozie/workflow/FalconPostProcessingTest.java
----------------------------------------------------------------------
diff --git a/oozie/src/test/java/org/apache/falcon/oozie/workflow/FalconPostProcessingTest.java b/oozie/src/test/java/org/apache/falcon/oozie/workflow/FalconPostProcessingTest.java
index c6485cd..871c63f 100644
--- a/oozie/src/test/java/org/apache/falcon/oozie/workflow/FalconPostProcessingTest.java
+++ b/oozie/src/test/java/org/apache/falcon/oozie/workflow/FalconPostProcessingTest.java
@@ -52,6 +52,7 @@ public class FalconPostProcessingTest {
                             "-" + Arg.FEED_INSTANCE_PATHS.getOptionName(),
                             "/click-logs/10/05/05/00/20,/raw-logs/10/05/05/00/20",
                             "-" + Arg.WORKFLOW_ID.getOptionName(), "workflow-01-00",
+                            "-" + Arg.WORKFLOW_USER.getOptionName(), "falcon",
                             "-" + Arg.RUN_ID.getOptionName(), "1",
                             "-" + Arg.NOMINAL_TIME.getOptionName(), "2011-01-01-01-00",
                             "-" + Arg.TIMESTAMP.getOptionName(), "2012-01-01-01-00",
@@ -150,6 +151,10 @@ public class FalconPostProcessingTest {
                 "agg-coord");
         Assert.assertEquals(m.getString(Arg.WORKFLOW_ID.getOptionName()),
                 "workflow-01-00");
+        String workflowUser = m.getString(Arg.WORKFLOW_USER.getOptionName());
+        if (workflowUser != null) { // null in the case of a user message
+            Assert.assertEquals(workflowUser, "falcon");
+        }
         Assert.assertEquals(m.getString(Arg.RUN_ID.getOptionName()), "1");
         Assert.assertEquals(m.getString(Arg.NOMINAL_TIME.getOptionName()),
                 "2011-01-01T01:00Z");

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index fca01b5..d647a62 100644
--- a/pom.xml
+++ b/pom.xml
@@ -941,11 +941,16 @@
 
             <!--  this is needed for embedded oozie -->
             <dependency>
-                <groupId>org.apache.hive</groupId>
-                <artifactId>hive-exec</artifactId>
-                <version>${hive.version}</version>
+                <groupId>org.apache.hcatalog</groupId>
+                <artifactId>webhcat-java-client</artifactId>
+                <version>${hcatalog.version}</version>
                 <exclusions>
                     <exclusion>
+                        <!-- This implies you cannot use orc files -->
+                        <groupId>com.google.protobuf</groupId>
+                        <artifactId>protobuf-java</artifactId>
+                    </exclusion>
+                    <exclusion>
                         <groupId>org.apache.hbase</groupId>
                         <artifactId>hbase-server</artifactId>
                     </exclusion>
@@ -961,12 +966,6 @@
             </dependency>
 
             <dependency>
-                <groupId>org.apache.hcatalog</groupId>
-                <artifactId>webhcat-java-client</artifactId>
-                <version>${hcatalog.version}</version>
-            </dependency>
-
-            <dependency>
                 <groupId>net.sourceforge.findbugs</groupId>
                 <artifactId>annotations</artifactId>
                 <version>1.3.2</version>

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java b/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java
index 2ba76a7..cf556e5 100644
--- a/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java
+++ b/prism/src/main/java/org/apache/falcon/resource/channel/HTTPChannel.java
@@ -29,7 +29,14 @@ import org.apache.falcon.util.RuntimeProperties;
 import org.apache.log4j.Logger;
 
 import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.*;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.HttpMethod;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import javax.ws.rs.core.Response.Status.Family;
@@ -44,8 +51,6 @@ import java.util.Properties;
 public class HTTPChannel extends AbstractChannel {
     private static final Logger LOG = Logger.getLogger(HTTPChannel.class);
 
-    private static final String REMOTE_USER = "Remote-User";
-
     private static final HttpServletRequest DEFAULT_NULL_REQUEST = new NullServletRequest();
 
     private static final Properties DEPLOYMENT_PROPERTIES = DeploymentProperties.get();
@@ -84,8 +89,9 @@ public class HTTPChannel extends AbstractChannel {
 
             ClientResponse response = Client.create(new DefaultClientConfig())
                     .resource(UriBuilder.fromUri(url).build())
-                    .header(REMOTE_USER, user).accept(accept)
-                    .type(mimeType).method(httpMethod, ClientResponse.class,
+                    .queryParam("user.name", user)
+                    .accept(accept).type(mimeType)
+                    .method(httpMethod, ClientResponse.class,
                             (isPost(httpMethod) ? incomingRequest.getInputStream() : null));
             incomingRequest.getInputStream().reset();
 
@@ -186,12 +192,4 @@ public class HTTPChannel extends AbstractChannel {
         }
         return consumes.value()[0];
     }
-
-    private String getProduces(Method method) {
-        Produces produces = method.getAnnotation(Produces.class);
-        if (produces.value() == null) {
-            return MediaType.WILDCARD;
-        }
-        return produces.value()[0];
-    }
 }
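With the header replaced by a query parameter, the prism-to-server channel now carries the proxied identity the way hadoop-auth's pseudo authentication expects it: as user.name on the URL. A minimal sketch of a raw client call in that style (host, port, and endpoint below are assumptions for illustration):

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class UserNameParamSketch {
        public static void main(String[] args) throws Exception {
            // The caller's identity travels as the user.name query parameter.
            URL url = new URL("http://localhost:15000/api/admin/version?user.name=falcon");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            System.out.println("HTTP " + conn.getResponseCode());
            conn.disconnect();
        }
    }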

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java b/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java
index f172e82..b4b544c 100644
--- a/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java
+++ b/prism/src/main/java/org/apache/falcon/security/BasicAuthFilter.java
@@ -18,82 +18,188 @@
 
 package org.apache.falcon.security;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.falcon.util.StartupProperties;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.log4j.Logger;
 import org.apache.log4j.NDC;
 
-import javax.servlet.*;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import javax.ws.rs.core.Response;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashSet;
+import java.util.Map;
+import java.util.Properties;
 import java.util.Set;
 import java.util.UUID;
 
 /**
  * This enforces authentication as part of the filter before processing the request.
+ * Subclass of {@link AuthenticationFilter}.
  */
-public class BasicAuthFilter implements Filter {
+public class BasicAuthFilter extends AuthenticationFilter {
 
     private static final Logger LOG = Logger.getLogger(BasicAuthFilter.class);
 
-    private static final String GUEST = "guest";
+    /**
+     * Constant for the prefix shared by all Falcon HTTP authentication configuration properties.
+     */
+    private static final String FALCON_PREFIX = "falcon.http.authentication.";
 
-    private static final Set<String> BLACK_LISTED_USER = new HashSet<String>(
-            Arrays.asList(new String[]{"hdfs", "mapred", "oozie", "falcon"}));
+    /**
+     * Constant for the configuration property that indicates the blacklisted super users for Falcon.
+     */
+    private static final String BLACK_LISTED_USERS_KEY = FALCON_PREFIX + "blacklisted.users";
 
-    private boolean isSecure;
+    /**
+     * Servlet that services OPTIONS requests. The OPTIONS method triggers authentication
+     * before the actual resource is invoked.
+     */
+    private HttpServlet optionsServlet;
+    private Set<String> blackListedUsers;
 
+    /**
+     * Initialize the filter.
+     *
+     * @param filterConfig filter configuration.
+     * @throws ServletException thrown if the filter could not be initialized.
+     */
     @Override
     public void init(FilterConfig filterConfig) throws ServletException {
-        String secure = StartupProperties.get().getProperty("security.enabled", "true");
-        this.isSecure = Boolean.parseBoolean(secure);
+        LOG.info("BasicAuthFilter initialization started");
+        super.init(filterConfig);
+
+        optionsServlet = new HttpServlet() {};
+        optionsServlet.init();
+
+        initializeBlackListedUsers();
     }
 
+    private void initializeBlackListedUsers() {
+        blackListedUsers = new HashSet<String>();
+        String blackListedUserConfig = StartupProperties.get().getProperty(BLACK_LISTED_USERS_KEY);
+        if (!StringUtils.isEmpty(blackListedUserConfig)) {
+            blackListedUsers.addAll(Arrays.asList(blackListedUserConfig.split(",")));
+        }
+    }
+
+    /**
+     * Returns the configuration, sourced from Falcon's startup properties, to be used by the
+     * authentication filter.
+     * <p/>
+     * All startup properties whose names start with {@link #FALCON_PREFIX} are returned, with the
+     * {@link #FALCON_PREFIX} prefix trimmed from their keys; for example, the property
+     * 'falcon.http.authentication.type' becomes just 'type'.
+     *
+     * @param configPrefix configuration prefix, this parameter is ignored by this implementation.
+     * @param filterConfig filter configuration, this parameter is ignored by this implementation.
+     * @return all startup properties prefixed with {@link #FALCON_PREFIX}, without the prefix.
+     */
     @Override
-    public void doFilter(ServletRequest request,
-                         ServletResponse response,
-                         FilterChain chain) throws IOException, ServletException {
+    protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) {
+        Properties authProperties = new Properties();
+        Properties configProperties = StartupProperties.get();
+
+        // setting the cookie path to root '/' so it is used for all resources.
+        authProperties.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
 
-        if (!(request instanceof HttpServletRequest) || !(response instanceof HttpServletResponse)) {
-            throw new IllegalStateException("Invalid request/response object");
+        for (Map.Entry entry : configProperties.entrySet()) {
+            String name = (String) entry.getKey();
+            if (name.startsWith(FALCON_PREFIX)) {
+                String value = (String) entry.getValue();
+                name = name.substring(FALCON_PREFIX.length());
+                authProperties.setProperty(name, value);
+            }
         }
-        HttpServletRequest httpRequest = (HttpServletRequest) request;
-        HttpServletResponse httpResponse = (HttpServletResponse) response;
 
-        String user;
-        String requestId = UUID.randomUUID().toString();
+        return authProperties;
+    }
 
-        if (!isSecure) {
-            user = GUEST;
-        } else {
-            user = httpRequest.getHeader("Remote-User");
-        }
+    @Override
+    public void doFilter(final ServletRequest request, final ServletResponse response,
+                         final FilterChain filterChain) throws IOException, ServletException {
+
+        FilterChain filterChainWrapper = new FilterChain() {
+
+            @Override
+            public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse)
+                throws IOException, ServletException {
+                HttpServletRequest httpRequest = (HttpServletRequest) servletRequest;
+
+                if (httpRequest.getMethod().equals("OPTIONS")) { // option request meant only for authentication
+                    optionsServlet.service(request, response);
+                } else {
+                    final String user = getUserFromRequest(httpRequest);
+                    if (StringUtils.isEmpty(user)) {
+                        ((HttpServletResponse) response).sendError(Response.Status.BAD_REQUEST.getStatusCode(),
+                                "User can't be empty");
+                    } else if (blackListedUsers.contains(user)) {
+                        ((HttpServletResponse) response).sendError(Response.Status.BAD_REQUEST.getStatusCode(),
+                                "User can't be a blacklisted superuser; see " + BLACK_LISTED_USERS_KEY);
+                    } else {
+                        try {
+                            String requestId = UUID.randomUUID().toString();
+                            NDC.push(user + ":" + httpRequest.getMethod() + "/" + httpRequest.getPathInfo());
+                            NDC.push(requestId);
+                            CurrentUser.authenticate(user);
+                            LOG.info("Request from user: " + user + ", URL=" + getRequestUrl(httpRequest));
 
-        if (user == null || user.isEmpty()) {
-            httpResponse.sendError(Response.Status.BAD_REQUEST.getStatusCode(),
-                    "Remote user header can't be empty");
-        } else if (BLACK_LISTED_USER.contains(user)) {
-            httpResponse.sendError(Response.Status.BAD_REQUEST.getStatusCode(),
-                    "Remote user header can't be superusers:" + BLACK_LISTED_USER);
-        } else {
-            CurrentUser.authenticate(user);
-            try {
-                NDC.push(user + ":" + httpRequest.getMethod() + "/" + httpRequest.getPathInfo());
-                NDC.push(requestId);
-                LOG.info("Request from user: " + user + ", path=" + httpRequest.getPathInfo()
-                        + ", query=" + httpRequest.getQueryString());
-                chain.doFilter(request, response);
-            } finally {
-                NDC.pop();
-                NDC.pop();
+                            filterChain.doFilter(servletRequest, servletResponse);
+                        } finally {
+                            NDC.pop();
+                            NDC.pop();
+                        }
+                    }
+                }
             }
-        }
+
+            private String getUserFromRequest(HttpServletRequest httpRequest) {
+                String user = httpRequest.getRemoteUser(); // made available by the request wrapper in the super class
+                if (!StringUtils.isEmpty(user)) {
+                    return user;
+                }
+
+                user = httpRequest.getParameter("user.name"); // available as a query parameter
+                if (!StringUtils.isEmpty(user)) {
+                    return user;
+                }
+
+                user = httpRequest.getHeader("Remote-User"); // backwards-compatibility
+                if (!StringUtils.isEmpty(user)) {
+                    return user;
+                }
+
+                return null;
+            }
+
+            private String getRequestUrl(HttpServletRequest request) {
+                StringBuffer url = request.getRequestURL();
+                if (request.getQueryString() != null) {
+                    url.append("?").append(request.getQueryString());
+                }
+
+                return url.toString();
+            }
+        };
+
+        super.doFilter(request, response, filterChainWrapper);
     }
 
     @Override
     public void destroy() {
+        if (optionsServlet != null) {
+            optionsServlet.destroy();
+        }
+
+        super.destroy();
     }
 }
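Authentication itself is now delegated to hadoop-auth's AuthenticationFilter, which is fed every startup property carrying the falcon.http.authentication. prefix with the prefix stripped. Illustrative startup.properties entries under that scheme (the suffixes are hadoop-auth's standard property names, the values are examples only, and the leading '*.' assumes Falcon's usual domain prefix on startup keys):

    *.falcon.http.authentication.type=simple
    *.falcon.http.authentication.token.validity=36000
    *.falcon.http.authentication.signature.secret=falcon
    *.falcon.http.authentication.simple.anonymous.allowed=false
    *.falcon.http.authentication.blacklisted.users=hdfs,mapred,oozie,falcon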

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/prism/src/main/java/org/apache/falcon/security/RemoteUserInHeaderBasedAuthenticationHandler.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/security/RemoteUserInHeaderBasedAuthenticationHandler.java b/prism/src/main/java/org/apache/falcon/security/RemoteUserInHeaderBasedAuthenticationHandler.java
new file mode 100644
index 0000000..1d32e86
--- /dev/null
+++ b/prism/src/main/java/org/apache/falcon/security/RemoteUserInHeaderBasedAuthenticationHandler.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.falcon.security;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.authentication.server.AuthenticationToken;
+import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+
+/**
+ * This class provides backwards compatibility for clients that send Remote-User in the request
+ * header; otherwise it delegates to PseudoAuthenticationHandler.
+ *
+ * This is a temporary solution until Falcon clients (0.3) are deprecated.
+ */
+public class RemoteUserInHeaderBasedAuthenticationHandler extends PseudoAuthenticationHandler {
+
+    @Override
+    public AuthenticationToken authenticate(HttpServletRequest request, HttpServletResponse response)
+        throws IOException, AuthenticationException {
+
+        String userName = request.getHeader("Remote-User");
+        if (StringUtils.isEmpty(userName)) {
+            return super.authenticate(request, response);
+        } else {
+            return new AuthenticationToken(userName, userName, getType());
+        }
+    }
+}
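Since hadoop-auth resolves the 'type' property either to a built-in handler ('simple', 'kerberos') or to the fully qualified class name of an AuthenticationHandler, this handler can presumably be enabled with a single startup property (illustrative):

    *.falcon.http.authentication.type=org.apache.falcon.security.RemoteUserInHeaderBasedAuthenticationHandler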

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java b/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java
index 6ac926d..7e2a6c1 100644
--- a/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java
+++ b/prism/src/main/java/org/apache/falcon/service/FalconTopicSubscriber.java
@@ -26,6 +26,7 @@ import org.apache.falcon.rerun.event.RerunEvent.RerunType;
 import org.apache.falcon.rerun.handler.AbstractRerunHandler;
 import org.apache.falcon.rerun.handler.RerunHandlerFactory;
 import org.apache.falcon.resource.InstancesResult;
+import org.apache.falcon.security.CurrentUser;
 import org.apache.falcon.workflow.WorkflowEngineFactory;
 import org.apache.falcon.workflow.engine.AbstractWorkflowEngine;
 import org.apache.log4j.Logger;
@@ -40,13 +41,14 @@ import java.util.Date;
 public class FalconTopicSubscriber implements MessageListener, ExceptionListener {
     private static final Logger LOG = Logger.getLogger(FalconTopicSubscriber.class);
 
-    private TopicSubscriber subscriber;
-    private String implementation;
-    private String userName;
-    private String password;
-    private String url;
-    private String topicName;
+    private final String implementation;
+    private final String userName;
+    private final String password;
+    private final String url;
+    private final String topicName;
+
     private Connection connection;
+    private TopicSubscriber subscriber;
 
     private AbstractRerunHandler retryHandler = RerunHandlerFactory.getRerunHandler(RerunType.RETRY);
     private AbstractRerunHandler latedataHandler = RerunHandlerFactory.getRerunHandler(RerunType.LATE);
@@ -62,8 +64,7 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
 
     public void startSubscriber() throws FalconException {
         try {
-            connection = createAndGetConnection(implementation, userName,
-                    password, url);
+            connection = createAndGetConnection(implementation, userName, password, url);
             TopicSession session = (TopicSession) connection.createSession(
                     false, Session.AUTO_ACKNOWLEDGE);
             Topic destination = session.createTopic(topicName);
@@ -72,8 +73,7 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
             connection.setExceptionListener(this);
             connection.start();
         } catch (Exception e) {
-            LOG.error("Error starting subscriber of topic: " + this.toString(),
-                    e);
+            LOG.error("Error starting subscriber of topic: " + this.toString(), e);
             throw new FalconException(e);
         }
     }
@@ -82,40 +82,41 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
     public void onMessage(Message message) {
         MapMessage mapMessage = (MapMessage) message;
         try {
-            debug(mapMessage);
+            if (LOG.isDebugEnabled()) {
+                debug(mapMessage);
+            }
             String cluster = mapMessage.getString(ARG.cluster.getArgName());
             String entityName = mapMessage.getString(ARG.entityName.getArgName());
             String entityType = mapMessage.getString(ARG.entityType.getArgName());
             String workflowId = mapMessage.getString(ARG.workflowId.getArgName());
+            String workflowUser = mapMessage.getString(ARG.workflowUser.getArgName());
             String runId = mapMessage.getString(ARG.runId.getArgName());
             String nominalTime = mapMessage.getString(ARG.nominalTime.getArgName());
             String status = mapMessage.getString(ARG.status.getArgName());
             String operation = mapMessage.getString(ARG.operation.getArgName());
 
+            CurrentUser.authenticate(workflowUser);
             AbstractWorkflowEngine wfEngine = WorkflowEngineFactory.getWorkflowEngine();
             InstancesResult result = wfEngine.getJobDetails(cluster, workflowId);
             Date startTime = result.getInstances()[0].startTime;
             Date endTime = result.getInstances()[0].endTime;
             Long duration = (endTime.getTime() - startTime.getTime()) * 1000000;
+
             if (status.equalsIgnoreCase("FAILED")) {
                 retryHandler.handleRerun(cluster, entityType, entityName,
-                        nominalTime, runId, workflowId,
+                        nominalTime, runId, workflowId, workflowUser,
                         System.currentTimeMillis());
 
                 GenericAlert.instrumentFailedInstance(cluster, entityType,
-                        entityName, nominalTime, workflowId, runId, operation,
-                        SchemaHelper.formatDateUTC(startTime),
-                        "", "", duration);
+                        entityName, nominalTime, workflowId, workflowUser, runId, operation,
+                        SchemaHelper.formatDateUTC(startTime), "", "", duration);
 
             } else if (status.equalsIgnoreCase("SUCCEEDED")) {
                 latedataHandler.handleRerun(cluster, entityType, entityName,
-                        nominalTime, runId, workflowId,
+                        nominalTime, runId, workflowId, workflowUser,
                         System.currentTimeMillis());
 
                 GenericAlert.instrumentSucceededInstance(cluster, entityType,
-                        entityName, nominalTime, workflowId, runId, operation,
-                        SchemaHelper.formatDateUTC(startTime),
-                        duration);
+                        entityName, nominalTime, workflowId, workflowUser, runId, operation,
+                        SchemaHelper.formatDateUTC(startTime), duration);
 
                 notifySLAService(cluster, entityName, entityType, nominalTime, duration);
             }
@@ -143,17 +144,14 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
     }
 
     private void debug(MapMessage mapMessage) throws JMSException {
-        if (LOG.isDebugEnabled()) {
-            StringBuffer buff = new StringBuffer();
-            buff.append("Received:{");
-            for (ARG arg : ARG.values()) {
-                buff.append(arg.getArgName()).append('=').
-                        append(mapMessage.getString(arg.getArgName())).
-                        append(", ");
-            }
-            buff.append("}");
-            LOG.debug(buff);
+        StringBuilder buff = new StringBuilder();
+        buff.append("Received:{");
+        for (ARG arg : ARG.values()) {
+            buff.append(arg.getArgName()).append('=')
+                .append(mapMessage.getString(arg.getArgName())).append(", ");
         }
+        buff.append("}");
+        LOG.debug(buff);
     }
 
     @Override
@@ -164,11 +162,14 @@ public class FalconTopicSubscriber implements MessageListener, ExceptionListener
     public void closeSubscriber() throws FalconException {
         try {
             LOG.info("Closing subscriber on topic : " + this.topicName);
-            subscriber.close();
-            connection.close();
+            if (subscriber != null) {
+                subscriber.close();
+            }
+            if (connection != null) {
+                connection.close();
+            }
         } catch (JMSException e) {
-            LOG.error("Error closing subscriber of topic: " + this.toString(),
-                    e);
+            LOG.error("Error closing subscriber of topic: " + this.toString(), e);
             throw new FalconException(e);
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/prism/src/main/java/org/apache/falcon/service/ProcessSubscriberService.java
----------------------------------------------------------------------
diff --git a/prism/src/main/java/org/apache/falcon/service/ProcessSubscriberService.java b/prism/src/main/java/org/apache/falcon/service/ProcessSubscriberService.java
index 1cd7776..9ae1ad1 100644
--- a/prism/src/main/java/org/apache/falcon/service/ProcessSubscriberService.java
+++ b/prism/src/main/java/org/apache/falcon/service/ProcessSubscriberService.java
@@ -62,6 +62,8 @@ public class ProcessSubscriberService implements FalconService {
 
     @Override
     public void destroy() throws FalconException {
-        subscriber.closeSubscriber();
+        if (subscriber != null) { // in case there was an exception while starting subscriber
+            subscriber.closeSubscriber();
+        }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/prism/src/test/java/org/apache/falcon/aspect/GenericAlertTest.java
----------------------------------------------------------------------
diff --git a/prism/src/test/java/org/apache/falcon/aspect/GenericAlertTest.java b/prism/src/test/java/org/apache/falcon/aspect/GenericAlertTest.java
index 1db71fd..919f821 100644
--- a/prism/src/test/java/org/apache/falcon/aspect/GenericAlertTest.java
+++ b/prism/src/test/java/org/apache/falcon/aspect/GenericAlertTest.java
@@ -28,8 +28,8 @@ public class GenericAlertTest {
 
     @Test
     public void testWfInstanceFailedAlert() throws Exception {
-        GenericAlert.instrumentFailedInstance("cluster", "process", "agg-coord", "120:df", "ef-id", "1",
-                "DELETE", "now", "error", "none", 1242);
+        GenericAlert.instrumentFailedInstance("cluster", "process", "agg-coord", "120:df",
+                "ef-id", "wf-user", "1", "DELETE", "now", "error", "none", 1242);
     }
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-falcon/blob/3c51f105/prism/src/test/java/org/apache/falcon/service/FalconTopicSubscriberTest.java
----------------------------------------------------------------------
diff --git a/prism/src/test/java/org/apache/falcon/service/FalconTopicSubscriberTest.java b/prism/src/test/java/org/apache/falcon/service/FalconTopicSubscriberTest.java
index f1536f4..9b1d42a 100644
--- a/prism/src/test/java/org/apache/falcon/service/FalconTopicSubscriberTest.java
+++ b/prism/src/test/java/org/apache/falcon/service/FalconTopicSubscriberTest.java
@@ -34,9 +34,7 @@ import javax.jms.*;
  */
 public class FalconTopicSubscriberTest {
 
-    private static final String BROKER_URL = "vm://localhost?broker.useJmx=false&broker.persistent=true";
-    // private static final String BROKER_URL =
-    // "tcp://localhost:61616?daemon=true";
+    private static final String BROKER_URL = "vm://localhost";
     private static final String BROKER_IMPL_CLASS = "org.apache.activemq.ActiveMQConnectionFactory";
     private static final String TOPIC_NAME = "FALCON.ENTITY.TOPIC";
     private static final String SECONDARY_TOPIC_NAME = "FALCON.ENTITY.SEC.TOPIC";
@@ -78,8 +76,7 @@ public class FalconTopicSubscriberTest {
         MapMessage mapMessage = session.createMapMessage();
         message.getKeyValueMap().put(ARG.status, "FAILED");
         for (ARG arg : ARG.values()) {
-            mapMessage.setString(arg.getPropName(), message
-                    .getKeyValueMap().get(arg));
+            mapMessage.setString(arg.getPropName(), message.getKeyValueMap().get(arg));
         }
         producer.send(mapMessage);
     }
@@ -101,6 +98,7 @@ public class FalconTopicSubscriberTest {
         message.getKeyValueMap().put(ARG.workflowId, "workflow-" + i);
         message.getKeyValueMap().put(ARG.topicName, TOPIC_NAME);
         message.getKeyValueMap().put(ARG.status, "SUCCEEDED");
+        message.getKeyValueMap().put(ARG.workflowUser, "falcon");
         return message;
     }