Posted to commits@ambari.apache.org by sw...@apache.org on 2014/02/25 20:57:44 UTC

[1/3] Revert "AMBARI-4791. API call to restart all components on one or more hosts should result in one request. (swagle)"

Repository: ambari
Updated Branches:
  refs/heads/trunk c6987eeef -> 668d4c26d


http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 2e114ef..dc4c5a5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -73,7 +73,6 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.internal.ComponentResourceProviderTest;
 import org.apache.ambari.server.controller.internal.HostResourceProviderTest;
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.controller.internal.ServiceResourceProviderTest;
 import org.apache.ambari.server.customactions.ActionDefinition;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
@@ -2349,12 +2348,8 @@ public class AmbariManagementControllerTest {
       put("excluded_hosts", "h2");
       put("align_maintenance_state", "true");
     }};
-    RequestResourceFilter resourceFilter = new RequestResourceFilter("HBASE", "HBASE_MASTER", null);
-    List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
-    resourceFilters.add(resourceFilter);
-
-    ExecuteActionRequest request = new ExecuteActionRequest(clusterName,
-      "DECOMMISSION", null, resourceFilters, params);
+    ExecuteActionRequest request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null, "HBASE", "HBASE_MASTER",
+        null, params);
 
     Map<String, String> requestProperties = new HashMap<String, String>();
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
@@ -2395,11 +2390,10 @@ public class AmbariManagementControllerTest {
           put("slave_type", "HBASE_REGIONSERVER");
           put("align_maintenance_state", "true");
         }};
-    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", params);
-    resourceFilter = new RequestResourceFilter("HBASE", "HBASE_MASTER", null);
-    request.getResourceFilters().add(resourceFilter);
+    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null, "HBASE", "HBASE_MASTER", null, params);
 
-    response = controller.createAction(request, requestProperties);
+    response = controller.createAction(request,
+        requestProperties);
 
     storedTasks = actionDB.getRequestTasks(response.getRequestId());
     execCmd = storedTasks.get(0).getExecutionCommandWrapper
@@ -2422,8 +2416,8 @@ public class AmbariManagementControllerTest {
     params = new HashMap<String, String>() {{
       put("included_hosts", "h2");
     }};
-    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null,
-      resourceFilters, params);
+    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null, "HBASE", "HBASE_MASTER",
+        null, params);
 
     response = controller.createAction(request,
         requestProperties);
@@ -3848,11 +3842,8 @@ public class AmbariManagementControllerTest {
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
 
     ArrayList<String> hosts = new ArrayList<String>() {{add("h1");}};
-    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", "DATANODE", hosts);
-    List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
-    resourceFilters.add(resourceFilter);
 
-    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", null, "a1", "HDFS", "DATANODE", hosts, params);
     RequestStatusResponse response = controller.createAction(actionRequest, requestProperties);
     assertEquals(1, response.getTasks().size());
     ShortTaskStatus taskStatus = response.getTasks().get(0);
@@ -3868,13 +3859,10 @@ public class AmbariManagementControllerTest {
     Assert.assertEquals("h1", task.getHostName());
     ExecutionCommand cmd = task.getExecutionCommandWrapper().getExecutionCommand();
     Assert.assertTrue(cmd.getCommandParams().containsKey("test"));
-    Assert.assertEquals("HDFS", cmd.getServiceName());
-    Assert.assertEquals("DATANODE", cmd.getComponentName());
+    Assert.assertEquals(cmd.getServiceName(), "HDFS");
+    Assert.assertEquals(cmd.getComponentName(), "DATANODE");
 
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("", "", null);
-    resourceFilters.add(resourceFilter);
-    actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a2", "", "", null, params);
     response = controller.createAction(actionRequest, requestProperties);
     assertEquals(2, response.getTasks().size());
 
@@ -3882,27 +3870,18 @@ public class AmbariManagementControllerTest {
     task = storedTasks2.get(1);
     Assert.assertEquals(RoleCommand.ACTIONEXECUTE, task.getRoleCommand());
     Assert.assertEquals("a2", task.getRole().name());
-    HashSet<String> expectedHosts = new HashSet<String>() {{
-      add("h2");
-      add("h1");
-    }};
-    HashSet<String> actualHosts = new HashSet<String>() {{
-      add(storedTasks2.get(1).getHostName());
-      add(storedTasks2.get(0).getHostName());
-    }};
+    HashSet<String> expectedHosts = new HashSet<String>(){{add("h2"); add("h1");}};
+    HashSet<String> actualHosts = new HashSet<String>(){{add(storedTasks2.get(1).getHostName()); add(storedTasks2
+        .get(0).getHostName());}};
     Assert.assertEquals(expectedHosts, actualHosts);
 
     cmd = task.getExecutionCommandWrapper().getExecutionCommand();
     Assert.assertTrue(cmd.getCommandParams().containsKey("test"));
-    Assert.assertEquals("HDFS", cmd.getServiceName());
-    Assert.assertEquals("DATANODE", cmd.getComponentName());
+    Assert.assertEquals(cmd.getServiceName(), "HDFS");
+    Assert.assertEquals(cmd.getComponentName(), "DATANODE");
 
     hosts = new ArrayList<String>() {{add("h3");}};
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("", "", hosts);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", "", "", hosts, params);
     response = controller.createAction(actionRequest, requestProperties);
     assertEquals(1, response.getTasks().size());
     taskStatus = response.getTasks().get(0);
@@ -3972,13 +3951,10 @@ public class AmbariManagementControllerTest {
     Map<String, String> params = new HashMap<String, String>() {{
       put("test", "test");
     }};
-    RequestResourceFilter resourceFilter = new RequestResourceFilter(
-      "HDFS",
-      "HDFS_CLIENT",
-      new ArrayList<String>() {{ add("h1"); }});
     ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1",
-      "RESTART", params);
-    actionRequest.getResourceFilters().add(resourceFilter);
+      "RESTART", null, "HDFS", "HDFS_CLIENT",
+      new ArrayList<String>() {{ add("h1"); }},
+      params);
 
     Map<String, String> requestProperties = new HashMap<String, String>();
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
@@ -4055,50 +4031,34 @@ public class AmbariManagementControllerTest {
     Map<String, String> params = new HashMap<String, String>() {{
       put("test", "test");
     }};
-
-    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
-
-    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", "NON_EXISTENT_CHECK", params);
-    actionRequest.getResourceFilters().add(resourceFilter);
+    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", "NON_EXISTENT_CHECK", "HDFS", params);
 
     Map<String, String> requestProperties = new HashMap<String, String>();
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
 
     expectActionCreationErrorWithMessage(actionRequest, requestProperties, "Unsupported action");
 
-    //actionRequest = new ExecuteActionRequest("c1", "NON_EXISTENT_SERVICE_CHECK", "HDFS", params);
-    //expectActionCreationErrorWithMessage(actionRequest, requestProperties, "Unsupported action");
-
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION_DATANODE", params);
-    actionRequest.getResourceFilters().add(resourceFilter);
-
+    actionRequest = new ExecuteActionRequest("c1", "NON_EXISTENT_SERVICE_CHECK", "HDFS", params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
-      "Unsupported action DECOMMISSION_DATANODE for Service: HDFS and Component: null");
-
-    //actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", "HDFS", params);
-    //expectActionCreationErrorWithMessage(actionRequest, requestProperties, "Unsupported action DECOMMISSION for Service: HDFS and Component: null");
+        "Unsupported action");
 
-    resourceFilter = new RequestResourceFilter("HDFS", "HDFS_CLIENT", null);
-    List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
-    resourceFilters.add(resourceFilter);
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION_DATANODE", "HDFS", params);
+    expectActionCreationErrorWithMessage(actionRequest, requestProperties,
+        "Unsupported action DECOMMISSION_DATANODE for Service: HDFS and Component: null");
 
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", "HDFS", params);
+    expectActionCreationErrorWithMessage(actionRequest, requestProperties,
+        "Unsupported action DECOMMISSION for Service: HDFS and Component: null");
 
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "HDFS", "HDFS_CLIENT", null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Unsupported action DECOMMISSION for Service: HDFS and Component: HDFS_CLIENT");
 
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("HDFS", null, null);
-    resourceFilters.add(resourceFilter);
-    actionRequest = new ExecuteActionRequest("c1", null, "DECOMMISSION_DATANODE", resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "DECOMMISSION_DATANODE", "HDFS", null, null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action DECOMMISSION_DATANODE does not exist");
 
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("YARN", "RESOURCEMANAGER", null);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "YARN", "RESOURCEMANAGER", null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Service not found, clusterName=c1, serviceName=YARN");
 
@@ -4106,12 +4066,7 @@ public class AmbariManagementControllerTest {
       put("included_hosts", "h1,h2");
       put("excluded_hosts", "h1,h3");
     }};
-
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "HDFS", "NAMENODE", null, params2);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Same host cannot be specified for inclusion as well as exclusion. Hosts: [h1]");
 
@@ -4120,21 +4075,13 @@ public class AmbariManagementControllerTest {
       put("excluded_hosts", "h4, h3");
       put("slave_type", "HDFS_CLIENT");
     }};
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "HDFS", "NAMENODE", null, params2);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Component HDFS_CLIENT is not supported for decommissioning.");
 
     List<String> hosts = new ArrayList<String>();
     hosts.add("h6");
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", hosts);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "HDFS", "NAMENODE", hosts, params2);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Decommission command cannot be issued with target host(s) specified.");
 
@@ -4142,11 +4089,7 @@ public class AmbariManagementControllerTest {
     params2 = new HashMap<String, String>() {{
       put("excluded_hosts", "h1 ");
     }};
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "HDFS", "NAMENODE", null, params2);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Component DATANODE on host h1 cannot be decommissioned as its not in STARTED state");
 
@@ -4154,7 +4097,7 @@ public class AmbariManagementControllerTest {
       put("excluded_hosts", "h1 ");
       put("mark_draining_only", "true");
     }};
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "HDFS", "NAMENODE", null, params2);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "mark_draining_only is not a valid parameter for NAMENODE");
 
@@ -4174,91 +4117,58 @@ public class AmbariManagementControllerTest {
         "a4", ActionType.SYSTEM, "", "HIVE", "", "Does file exist",
         TargetHostType.ANY, Short.valueOf("100")));
 
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", null, null);
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", null, null, null, null);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a1 requires input 'test' that is not provided");
 
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", null, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", null, null, null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a1 requires input 'dirName' that is not provided");
 
     params.put("dirName", "dirName");
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", null, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", null, null, null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a1 requires explicit target host(s)");
 
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("HIVE", null, null);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a2", "HIVE", null, null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a2 targets service HIVE that does not match with expected HDFS");
 
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("HDFS", "HDFS_CLIENT", null);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a2", "HDFS", "HDFS_CLIENT", null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a2 targets component HDFS_CLIENT that does not match with expected DATANODE");
 
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("HDFS2", "HDFS_CLIENT", null);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", "HDFS2", "HDFS_CLIENT", null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a1 targets service HDFS2 that does not exist");
 
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("HDFS", "HDFS_CLIENT2", null);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", "HDFS", "HDFS_CLIENT2", null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a1 targets component HDFS_CLIENT2 that does not exist");
 
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("", "HDFS_CLIENT2", null);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", "", "HDFS_CLIENT2", null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a1 targets component HDFS_CLIENT2 without specifying the target service");
 
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("", "", null);
-    resourceFilters.add(resourceFilter);
-
     // targets a service that is not a member of the stack (e.g. MR not in HDP-2)
-    actionRequest = new ExecuteActionRequest("c1", null, "a3", resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a3", "", "", null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a3 targets service MAPREDUCE that does not exist");
 
     hosts = new ArrayList<String>();
     hosts.add("h6");
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("", "", hosts);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a2", "", "", hosts, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Request specifies host h6 but its not a valid host based on the target service=HDFS and component=DATANODE");
 
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("HIVE", "", null);
-    resourceFilters.add(resourceFilter);
-
-    actionRequest = new ExecuteActionRequest("c1", null, "a4", resourceFilters, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a4", "HIVE", "", null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Suitable hosts not found, component=, service=HIVE, cluster=c1, actionName=a4");
 
   }
 
   private void expectActionCreationErrorWithMessage(ExecuteActionRequest actionRequest,
-                                                    Map<String, String> requestProperties,
-                                                    String message) {
+                                                    Map<String, String> requestProperties, String message) {
     try {
       RequestStatusResponse response = controller.createAction(actionRequest, requestProperties);
       Assert.fail("createAction should fail");
@@ -4310,9 +4220,7 @@ public class AmbariManagementControllerTest {
     Map<String, String> params = new HashMap<String, String>() {{
       put("test", "test");
     }};
-    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", Role.HDFS_SERVICE_CHECK.name(), params);
-    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
-    actionRequest.getResourceFilters().add(resourceFilter);
+    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", Role.HDFS_SERVICE_CHECK.name(), "HDFS", params);
 
     Map<String, String> requestProperties = new HashMap<String, String>();
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
@@ -4341,9 +4249,7 @@ public class AmbariManagementControllerTest {
     assertNull(hostRoleCommand.getCustomCommandName());
 
     assertEquals(task.getTaskId(), hostRoleCommand.getTaskId());
-    assertNotNull(actionRequest.getResourceFilters());
-    RequestResourceFilter requestResourceFilter = actionRequest.getResourceFilters().get(0);
-    assertEquals(resourceFilter.getServiceName(), hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getServiceName());
+    assertEquals(actionRequest.getServiceName(), hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getServiceName());
     assertEquals(actionRequest.getClusterName(), hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getClusterName());
     assertEquals(actionRequest.getCommandName(), hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getRole());
     assertEquals(Role.HDFS_CLIENT.name(), hostRoleCommand.getEvent().getEvent().getServiceComponentName());
@@ -4351,10 +4257,7 @@ public class AmbariManagementControllerTest {
     assertNotNull(hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getConfigurations());
     assertEquals(2, hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getConfigurations().size());
     assertEquals(requestProperties.get(REQUEST_CONTEXT_PROPERTY), stage.getRequestContext());
-
-    actionRequest = new ExecuteActionRequest("c1", Role.MAPREDUCE_SERVICE_CHECK.name(), null);
-    resourceFilter = new RequestResourceFilter("MAPREDUCE", null, null);
-    actionRequest.getResourceFilters().add(resourceFilter);
+    actionRequest = new ExecuteActionRequest("c1", Role.MAPREDUCE_SERVICE_CHECK.name(), "MAPREDUCE", null);
 
     response = controller.createAction(actionRequest, requestProperties);
 
@@ -5659,9 +5562,8 @@ public class AmbariManagementControllerTest {
     }
     Assert.assertEquals("Expect only one service check.", 1, commandCount);
 
-    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
-    ExecuteActionRequest actionRequest = new ExecuteActionRequest("foo1", Role.HDFS_SERVICE_CHECK.name(), null);
-    actionRequest.getResourceFilters().add(resourceFilter);
+    ExecuteActionRequest actionRequest = new ExecuteActionRequest("foo1", Role.HDFS_SERVICE_CHECK.name(),
+        null, "HDFS", null, null, null);
     Map<String, String> requestProperties = new HashMap<String, String>();
 
     RequestStatusResponse response = controller.createAction(actionRequest, requestProperties);
@@ -6057,9 +5959,8 @@ public class AmbariManagementControllerTest {
       put("excluded_hosts", "h2");
       put("align_maintenance_state", "true");
     }};
-    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
-    ExecuteActionRequest request = new ExecuteActionRequest(clusterName, "DECOMMISSION", params);
-    request.getResourceFilters().add(resourceFilter);
+    ExecuteActionRequest request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null, "HDFS", "NAMENODE",
+        null, params);
 
     Map<String, String> requestProperties = new HashMap<String, String>();
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
@@ -6091,9 +5992,7 @@ public class AmbariManagementControllerTest {
       put("excluded_hosts", "h1");
       put("align_maintenance_state", "true");
     }};
-    resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
-    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", params);
-    request.getResourceFilters().add(resourceFilter);
+    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null, "HDFS", "NAMENODE", null, params);
 
     response = controller.createAction(request,
         requestProperties);
@@ -6134,9 +6033,7 @@ public class AmbariManagementControllerTest {
       put("included_hosts", "h1 , h2");
       put("align_maintenance_state", "true");
     }};
-    resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
-    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", params);
-    request.getResourceFilters().add(resourceFilter);
+    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null, "HDFS", "NAMENODE", null, params);
 
     response = controller.createAction(request,
         requestProperties);
@@ -6171,209 +6068,6 @@ public class AmbariManagementControllerTest {
   }
 
   @Test
-  public void testResourceFiltersWithCustomActions() throws AmbariException {
-    setupClusterWithHosts("c1", "HDP-2.0.6",
-      new ArrayList<String>() {{
-        add("h1");
-        add("h2");
-        add("h3");
-      }},
-      "centos6");
-
-    Cluster cluster = clusters.getCluster("c1");
-    cluster.setDesiredStackVersion(new StackId("HDP-2.0.6"));
-    cluster.setCurrentStackVersion(new StackId("HDP-2.0.6"));
-
-    ConfigFactory cf = injector.getInstance(ConfigFactory.class);
-    Config config1 = cf.createNew(cluster, "global",
-      new HashMap<String, String>() {{
-        put("key1", "value1");
-      }});
-    config1.setVersionTag("version1");
-
-    Config config2 = cf.createNew(cluster, "core-site",
-      new HashMap<String, String>() {{
-        put("key1", "value1");
-      }});
-    config2.setVersionTag("version1");
-
-    cluster.addConfig(config1);
-    cluster.addConfig(config2);
-
-    Service hdfs = cluster.addService("HDFS");
-    hdfs.persist();
-
-    Service mapred = cluster.addService("YARN");
-    mapred.persist();
-
-    hdfs.addServiceComponent(Role.HDFS_CLIENT.name()).persist();
-    hdfs.addServiceComponent(Role.NAMENODE.name()).persist();
-    hdfs.addServiceComponent(Role.DATANODE.name()).persist();
-
-    mapred.addServiceComponent(Role.RESOURCEMANAGER.name()).persist();
-
-    hdfs.getServiceComponent(Role.HDFS_CLIENT.name()).addServiceComponentHost("h1").persist();
-    hdfs.getServiceComponent(Role.NAMENODE.name()).addServiceComponentHost("h1").persist();
-    hdfs.getServiceComponent(Role.DATANODE.name()).addServiceComponentHost("h1").persist();
-    hdfs.getServiceComponent(Role.DATANODE.name()).addServiceComponentHost("h2").persist();
-
-    controller.getAmbariMetaInfo().addActionDefinition(new ActionDefinition(
-      "a1", ActionType.SYSTEM, "", "HDFS", "", "Some custom action.",
-      TargetHostType.ALL, Short.valueOf("100")));
-
-    Map<String, String> params = new HashMap<String, String>() {{
-      put("test", "test");
-    }};
-
-    Map<String, String> requestProperties = new HashMap<String, String>();
-    requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
-
-    List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
-    ArrayList<String> hosts = new ArrayList<String>() {{ add("h2"); }};
-    RequestResourceFilter resourceFilter1 = new RequestResourceFilter("HDFS", "DATANODE", hosts);
-
-    hosts = new ArrayList<String>() {{ add("h1"); }};
-    RequestResourceFilter resourceFilter2 = new RequestResourceFilter("HDFS", "NAMENODE", hosts);
-
-    resourceFilters.add(resourceFilter1);
-    resourceFilters.add(resourceFilter2);
-
-    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
-    RequestStatusResponse response = null;
-    try {
-      response = controller.createAction(actionRequest, requestProperties);
-    } catch (AmbariException ae) {
-      LOG.info("Expected exception.", ae);
-      Assert.assertTrue(ae.getMessage().contains("Custom action definition only " +
-        "allows one resource filter to be specified"));
-    }
-    resourceFilters.remove(resourceFilter1);
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
-    response = controller.createAction(actionRequest, requestProperties);
-
-    assertEquals(1, response.getTasks().size());
-    HostRoleCommand nnCommand = null;
-
-    for (HostRoleCommand hrc : actionDB.getRequestTasks(response.getRequestId())) {
-      if (hrc.getHostName().equals("h1")) {
-        nnCommand = hrc;
-      }
-    }
-
-    Assert.assertNotNull(nnCommand);
-    ExecutionCommand cmd = nnCommand.getExecutionCommandWrapper().getExecutionCommand();
-    Assert.assertEquals("a1", cmd.getRole());
-    Assert.assertTrue(cmd.getCommandParams().containsKey("test"));
-  }
-
-  @Test
-  public void testResourceFiltersWithCustomCommands() throws AmbariException {
-    setupClusterWithHosts("c1", "HDP-2.0.6",
-      new ArrayList<String>() {{
-        add("h1");
-        add("h2");
-        add("h3");
-      }},
-      "centos6");
-
-    Cluster cluster = clusters.getCluster("c1");
-    cluster.setDesiredStackVersion(new StackId("HDP-2.0.6"));
-    cluster.setCurrentStackVersion(new StackId("HDP-2.0.6"));
-
-    ConfigFactory cf = injector.getInstance(ConfigFactory.class);
-    Config config1 = cf.createNew(cluster, "global",
-      new HashMap<String, String>() {{
-        put("key1", "value1");
-      }});
-    config1.setVersionTag("version1");
-
-    Config config2 = cf.createNew(cluster, "core-site",
-      new HashMap<String, String>() {{
-        put("key1", "value1");
-      }});
-    config2.setVersionTag("version1");
-
-    cluster.addConfig(config1);
-    cluster.addConfig(config2);
-
-    Service hdfs = cluster.addService("HDFS");
-    hdfs.persist();
-
-    Service mapred = cluster.addService("YARN");
-    mapred.persist();
-
-    hdfs.addServiceComponent(Role.HDFS_CLIENT.name()).persist();
-    hdfs.addServiceComponent(Role.NAMENODE.name()).persist();
-    hdfs.addServiceComponent(Role.DATANODE.name()).persist();
-
-    mapred.addServiceComponent(Role.RESOURCEMANAGER.name()).persist();
-
-    hdfs.getServiceComponent(Role.HDFS_CLIENT.name()).addServiceComponentHost("h1").persist();
-    hdfs.getServiceComponent(Role.NAMENODE.name()).addServiceComponentHost("h1").persist();
-    hdfs.getServiceComponent(Role.DATANODE.name()).addServiceComponentHost("h1").persist();
-    hdfs.getServiceComponent(Role.DATANODE.name()).addServiceComponentHost("h2").persist();
-
-    mapred.getServiceComponent(Role.RESOURCEMANAGER.name()).addServiceComponentHost("h2").persist();
-
-    Map<String, String> params = new HashMap<String, String>() {{
-      put("test", "test");
-    }};
-
-    Map<String, String> requestProperties = new HashMap<String, String>();
-    requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
-
-    // Test multiple restarts
-    List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
-    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS",
-      Role.DATANODE.name(), new ArrayList<String>() {{ add("h1"); add("h2"); }});
-    resourceFilters.add(resourceFilter);
-    resourceFilter = new RequestResourceFilter("YARN",
-      Role.RESOURCEMANAGER.name(), new ArrayList<String>() {{ add("h2"); }});
-    resourceFilters.add(resourceFilter);
-
-    ExecuteActionRequest request = new ExecuteActionRequest("c1",
-      "RESTART", null, resourceFilters, params);
-
-    RequestStatusResponse response = controller.createAction(request, requestProperties);
-    Assert.assertEquals(3, response.getTasks().size());
-    List<HostRoleCommand> storedTasks = actionDB.getRequestTasks(response.getRequestId());
-
-    Assert.assertNotNull(storedTasks);
-    int expectedRestartCount = 0;
-    for (HostRoleCommand hrc : storedTasks) {
-      Assert.assertEquals("RESTART", hrc.getCustomCommandName());
-      if (hrc.getHostName().equals("h1") && hrc.getRole().equals(Role.DATANODE)) {
-        expectedRestartCount++;
-      } else if(hrc.getHostName().equals("h2")) {
-        if (hrc.getRole().equals(Role.DATANODE)) {
-          expectedRestartCount++;
-        } else if (hrc.getRole().equals(Role.RESOURCEMANAGER)) {
-          expectedRestartCount++;
-        }
-      }
-    }
-
-    Assert.assertEquals("Restart 2 datanodes and 1 Resourcemanager.", 3, expectedRestartCount);
-
-    // Test service checks - specific host
-    resourceFilters.clear();
-    resourceFilter = new RequestResourceFilter("HDFS", null,
-      new ArrayList<String>() {{ add("h2"); }});
-    resourceFilters.add(resourceFilter);
-    request = new ExecuteActionRequest("c1", Role.HDFS_SERVICE_CHECK.name(),
-      null, resourceFilters, null);
-    response = controller.createAction(request, requestProperties);
-
-    Assert.assertEquals(1, response.getTasks().size());
-    storedTasks = actionDB.getRequestTasks(response.getRequestId());
-    Assert.assertNotNull(storedTasks);
-    Assert.assertEquals(Role.HDFS_SERVICE_CHECK.name(),
-      storedTasks.get(0).getRole().name());
-    Assert.assertEquals("h2", storedTasks.get(0).getHostName());
-  }
-
-
-  @Test
   public void testConfigsAttachedToServiceChecks() throws AmbariException {
     String clusterName = "foo1";
     createCluster(clusterName);
@@ -6869,9 +6563,8 @@ public class AmbariManagementControllerTest {
       put("test", "test");
       put("excluded_hosts", " h1 ");
     }};
-    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
-    ExecuteActionRequest request = new ExecuteActionRequest(clusterName, "DECOMMISSION", params);
-    request.getResourceFilters().add(resourceFilter);
+    ExecuteActionRequest request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null,
+        "HDFS", "NAMENODE", null, params);
 
     Map<String, String> requestProperties = new HashMap<String, String>();
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
@@ -8782,9 +8475,7 @@ public class AmbariManagementControllerTest {
 
       amc.createHostComponents(componentHostRequests);
 
-      RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
-      ExecuteActionRequest ar = new ExecuteActionRequest(CLUSTER_NAME, Role.HDFS_SERVICE_CHECK.name(), null);
-      ar.getResourceFilters().add(resourceFilter);
+      ExecuteActionRequest ar = new ExecuteActionRequest(CLUSTER_NAME, Role.HDFS_SERVICE_CHECK.name(), "HDFS", null);
       amc.createAction(ar, null);
 
       // change mind, delete the cluster

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/test/java/org/apache/ambari/server/controller/MaintenanceStateHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/MaintenanceStateHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/MaintenanceStateHelperTest.java
index 489499a..3f46ce0 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/MaintenanceStateHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/MaintenanceStateHelperTest.java
@@ -28,7 +28,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Host;
@@ -94,12 +93,9 @@ public class MaintenanceStateHelperTest {
     map = rpCapture.getValue();
     
     Assert.assertEquals("nagios_update_ignore", ear.getActionName());
-    Assert.assertEquals(null, ear.getCommandName());
-    Assert.assertEquals(1, ear.getResourceFilters().size());
-    RequestResourceFilter resourceFilter = ear.getResourceFilters().get(0);
-
-    Assert.assertEquals("NAGIOS", resourceFilter.getServiceName());
-    Assert.assertEquals("NAGIOS_SERVER", resourceFilter.getComponentName());
+    Assert.assertEquals("ACTIONEXECUTE", ear.getCommandName());
+    Assert.assertEquals("NAGIOS", ear.getServiceName());
+    Assert.assertEquals("NAGIOS_SERVER", ear.getComponentName());
     Assert.assertEquals("c1", ear.getClusterName());
     Assert.assertTrue(map.containsKey("context"));  
   }
@@ -151,11 +147,9 @@ public class MaintenanceStateHelperTest {
     rpCapture.getValue();
     
     Assert.assertEquals("nagios_update_ignore", ear.getActionName());
-    Assert.assertEquals(null, ear.getCommandName());
-    Assert.assertEquals(1, ear.getResourceFilters().size());
-    RequestResourceFilter resourceFilter = ear.getResourceFilters().get(0);
-    Assert.assertEquals("NAGIOS", resourceFilter.getServiceName());
-    Assert.assertEquals("NAGIOS_SERVER", resourceFilter.getComponentName());
+    Assert.assertEquals("ACTIONEXECUTE", ear.getCommandName());
+    Assert.assertEquals("NAGIOS", ear.getServiceName());
+    Assert.assertEquals("NAGIOS_SERVER", ear.getComponentName());
     Assert.assertEquals("c1", ear.getClusterName());
     Assert.assertTrue(map.containsKey("context"));    
   }
@@ -209,11 +203,9 @@ public class MaintenanceStateHelperTest {
     map = rpCapture.getValue();
     
     Assert.assertEquals("nagios_update_ignore", ear.getActionName());
-    Assert.assertEquals(null, ear.getCommandName());
-    Assert.assertEquals(1, ear.getResourceFilters().size());
-    RequestResourceFilter resourceFilter = ear.getResourceFilters().get(0);
-    Assert.assertEquals("NAGIOS", resourceFilter.getServiceName());
-    Assert.assertEquals("NAGIOS_SERVER", resourceFilter.getComponentName());
+    Assert.assertEquals("ACTIONEXECUTE", ear.getCommandName());
+    Assert.assertEquals("NAGIOS", ear.getServiceName());
+    Assert.assertEquals("NAGIOS_SERVER", ear.getComponentName());
     Assert.assertEquals("c1", ear.getClusterName());
     Assert.assertTrue(map.containsKey("context"));
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
index d01e2e5..e279342 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
@@ -18,8 +18,6 @@
 
 package org.apache.ambari.server.controller.internal;
 
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
@@ -40,7 +38,6 @@ import org.easymock.Capture;
 import org.junit.Assert;
 import org.junit.Test;
 
-import java.lang.reflect.Type;
 import java.util.*;
 
 import static org.easymock.EasyMock.capture;
@@ -766,20 +763,10 @@ public class RequestResourceProviderTest {
 
     properties.put(RequestResourceProvider.REQUEST_CLUSTER_NAME_PROPERTY_ID, "c1");
 
-    final RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
-    List<RequestResourceFilter> resourceFilters =
-      new ArrayList<RequestResourceFilter>() {{
-        add(resourceFilter);
-      }};
-
-    Type listType = new TypeToken<List<RequestResourceFilter>>(){}.getType();
-    String filterJson = new Gson().toJson(resourceFilters, listType);
-
-    properties.put(RequestResourceProvider.REQUEST_RESOURCE_FILTER_ID, filterJson);
-
     propertySet.add(properties);
 
     Map<String, String> requestInfoProperties = new HashMap<String, String>();
+    requestInfoProperties.put(RequestResourceProvider.SERVICE_NAME_ID, "HDFS");
     requestInfoProperties.put(RequestResourceProvider.COMMAND_ID, "HDFS_SERVICE_CHECK");
 
     // create the request
@@ -790,19 +777,14 @@ public class RequestResourceProviderTest {
         PropertyHelper.getKeyPropertyIds(type),
         managementController);
     provider.createResources(request);
-    ExecuteActionRequest capturedRequest = actionRequest.getValue();
-
     Assert.assertTrue(actionRequest.hasCaptured());
-    Assert.assertTrue(capturedRequest.isCommand());
-    Assert.assertEquals(null, capturedRequest.getActionName());
-    Assert.assertEquals("HDFS_SERVICE_CHECK", capturedRequest.getCommandName());
-    Assert.assertNotNull(capturedRequest.getResourceFilters());
-    Assert.assertEquals(1, capturedRequest.getResourceFilters().size());
-    RequestResourceFilter capturedResourceFilter = capturedRequest.getResourceFilters().get(0);
-    Assert.assertEquals("HDFS", capturedResourceFilter.getServiceName());
-    Assert.assertEquals(null, capturedResourceFilter.getComponentName());
-    Assert.assertNotNull(capturedResourceFilter.getHostNames());
-    Assert.assertEquals(0, capturedResourceFilter.getHostNames().size());
+    Assert.assertTrue(actionRequest.getValue().isCommand());
+    Assert.assertEquals(null, actionRequest.getValue().getActionName());
+    Assert.assertEquals("HDFS_SERVICE_CHECK", actionRequest.getValue().getCommandName());
+    Assert.assertEquals("HDFS", actionRequest.getValue().getServiceName());
+    Assert.assertEquals(null, actionRequest.getValue().getComponentName());
+    Assert.assertNotNull(actionRequest.getValue().getHosts());
+    Assert.assertEquals(0, actionRequest.getValue().getHosts().size());
     Assert.assertEquals(0, actionRequest.getValue().getParameters().size());
   }
 
@@ -829,26 +811,13 @@ public class RequestResourceProviderTest {
 
     properties.put(RequestResourceProvider.REQUEST_CLUSTER_NAME_PROPERTY_ID, "c1");
 
-    final RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
-    resourceFilter.getHostNames().add("host1");
-    resourceFilter.getHostNames().add("host2");
-    resourceFilter.getHostNames().add("host3");
-    List<RequestResourceFilter> resourceFilters =
-      new ArrayList<RequestResourceFilter>() {{
-        add(resourceFilter);
-      }};
-
-    Type listType = new TypeToken<List<RequestResourceFilter>>(){}.getType();
-    String filterJson = new Gson().toJson(resourceFilters, listType);
-
-    properties.put(RequestResourceProvider.REQUEST_RESOURCE_FILTER_ID, filterJson);
-
     propertySet.add(properties);
 
     Map<String, String> requestInfoProperties = new HashMap<String, String>();
-
+    requestInfoProperties.put(RequestResourceProvider.SERVICE_NAME_ID, "HDFS");
     requestInfoProperties.put("/parameters/param1", "value1");
     requestInfoProperties.put("/parameters/param2", "value2");
+    requestInfoProperties.put(RequestResourceProvider.HOSTS_ID, "host1 ,host2, host3 ");
 
     String[] expectedHosts = new String[]{"host1", "host2", "host3"};
     Map<String, String> expectedParams = new HashMap<String, String>() {{
@@ -883,19 +852,16 @@ public class RequestResourceProviderTest {
 
     provider.createResources(request);
     Assert.assertTrue(actionRequest.hasCaptured());
-    ExecuteActionRequest capturedRequest = actionRequest.getValue();
-    Assert.assertTrue(capturedRequest.isCommand());
-    Assert.assertEquals(null, capturedRequest.getActionName());
-    Assert.assertEquals("HDFS_SERVICE_CHECK", capturedRequest.getCommandName());
-    Assert.assertEquals(1, capturedRequest.getResourceFilters().size());
-    RequestResourceFilter capturedResourceFilter = capturedRequest.getResourceFilters().get(0);
-    Assert.assertEquals("HDFS", capturedResourceFilter.getServiceName());
-    Assert.assertEquals(null, capturedResourceFilter.getComponentName());
-    Assert.assertEquals(3, capturedResourceFilter.getHostNames().size());
-    Assert.assertArrayEquals(expectedHosts, capturedResourceFilter.getHostNames().toArray());
-    Assert.assertEquals(2, capturedRequest.getParameters().size());
+    Assert.assertTrue(actionRequest.getValue().isCommand());
+    Assert.assertEquals(null, actionRequest.getValue().getActionName());
+    Assert.assertEquals("HDFS_SERVICE_CHECK", actionRequest.getValue().getCommandName());
+    Assert.assertEquals("HDFS", actionRequest.getValue().getServiceName());
+    Assert.assertEquals(null, actionRequest.getValue().getComponentName());
+    Assert.assertEquals(3, actionRequest.getValue().getHosts().size());
+    Assert.assertArrayEquals(expectedHosts, actionRequest.getValue().getHosts().toArray());
+    Assert.assertEquals(2, actionRequest.getValue().getParameters().size());
     for(String key : expectedParams.keySet()) {
-      Assert.assertEquals(expectedParams.get(key), capturedRequest.getParameters().get(key));
+      Assert.assertEquals(expectedParams.get(key), actionRequest.getValue().getParameters().get(key));
     }
   }
 }


[3/3] git commit: Revert "AMBARI-4791. API call to restart all components on one or more hosts should result in one request. (swagle)"

Posted by sw...@apache.org.
Revert "AMBARI-4791. API call to restart all components on one or more hosts should result in one request. (swagle)"

This reverts commit c6987eeef05bce7404635a9a88a4f30b94c134e0.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/668d4c26
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/668d4c26
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/668d4c26

Branch: refs/heads/trunk
Commit: 668d4c26d2af899c4a84873794ee5d4241a0a975
Parents: c6987ee
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Tue Feb 25 11:57:19 2014 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Tue Feb 25 11:57:19 2014 -0800

----------------------------------------------------------------------
 .../ambari/server/actionmanager/Request.java    |  81 ++-
 .../controller/ActionExecutionContext.java      |  87 ++--
 .../controller/AmbariActionExecutionHelper.java | 320 ++++++------
 .../AmbariCustomCommandExecutionHelper.java     | 494 +++++++++++--------
 .../AmbariManagementControllerImpl.java         | 280 ++---------
 .../ambari/server/controller/AmbariServer.java  |   1 -
 .../server/controller/ControllerModule.java     |   5 +-
 .../server/controller/ExecuteActionRequest.java |  48 +-
 .../controller/MaintenanceStateHelper.java      |  15 +-
 .../internal/RequestResourceFilter.java         |  69 ---
 .../internal/RequestResourceProvider.java       |  49 +-
 .../server/orm/entities/RequestEntity.java      |  49 +-
 .../entities/RequestResourceFilterEntity.java   |  94 ----
 .../server/upgrade/UpgradeCatalog150.java       |  10 -
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   4 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   4 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   7 +-
 .../src/main/resources/META-INF/persistence.xml |   2 +-
 .../src/main/resources/properties.json          |   4 +-
 .../actionmanager/TestActionDBAccessorImpl.java |   8 +-
 .../AmbariManagementControllerTest.java         | 431 +++-------------
 .../controller/MaintenanceStateHelperTest.java  |  26 +-
 .../internal/RequestResourceProviderTest.java   |  72 +--
 23 files changed, 749 insertions(+), 1411 deletions(-)
----------------------------------------------------------------------
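
Summary of the API-level effect, for readers skimming the diffs in this series: the revert removes the RequestResourceFilter indirection and restores the positional-argument form of ExecuteActionRequest, in which the target service, component and hosts are passed directly to the constructor. Below is a minimal sketch of the restored call shape; the class name is made up for illustration, and the constructor signature is inferred from the test call sites in the [1/3] part of this series rather than quoted from ExecuteActionRequest.java itself.

import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.server.controller.ExecuteActionRequest;

// Illustrative sketch only; not part of this commit.
public class DecommissionRequestSketch {
  public static void main(String[] args) {
    Map<String, String> params = new HashMap<String, String>();
    params.put("excluded_hosts", "h1");

    // Post-revert shape: cluster name, command name, action name, service,
    // component, host list and parameters are positional arguments.
    ExecuteActionRequest request = new ExecuteActionRequest(
        "c1", "DECOMMISSION", null, "HDFS", "NAMENODE", null, params);

    // Pre-revert (AMBARI-4791), the same intent was expressed through a
    // resource filter list instead:
    //   new ExecuteActionRequest("c1", "DECOMMISSION", null,
    //       Collections.singletonList(
    //           new RequestResourceFilter("HDFS", "NAMENODE", null)),
    //       params);
    System.out.println(request);
  }
}
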


http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
index 22731ee..d1047a7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
@@ -23,9 +23,7 @@ import com.google.inject.assistedinject.Assisted;
 import com.google.inject.assistedinject.AssistedInject;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.ExecuteActionRequest;
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.orm.entities.RequestEntity;
-import org.apache.ambari.server.orm.entities.RequestResourceFilterEntity;
 import org.apache.ambari.server.orm.entities.StageEntity;
 import org.apache.ambari.server.state.Clusters;
 import org.slf4j.Logger;
@@ -49,7 +47,9 @@ public class Request {
   private long endTime;
   private HostRoleStatus status; // not persisted yet
   private String inputs;
-  private List<RequestResourceFilter> resourceFilters;
+  private String targetService;
+  private String targetComponent;
+  private String targetHosts;
   private RequestType requestType;
 
   private Collection<Stage> stages = new ArrayList<Stage>();
@@ -112,7 +112,9 @@ public class Request {
                  Clusters clusters, Gson gson) throws AmbariException {
     this(stages, clusters);
     if (actionRequest != null) {
-      this.resourceFilters = actionRequest.getResourceFilters();
+      this.targetService = actionRequest.getServiceName();
+      this.targetComponent = actionRequest.getComponentName();
+      this.targetHosts = gson.toJson(actionRequest.getHosts());
       this.inputs = gson.toJson(actionRequest.getParameters());
       this.requestType = actionRequest.isCommand() ? RequestType.COMMAND : RequestType.ACTION;
       this.commandName = actionRequest.isCommand() ? actionRequest.getCommandName() : actionRequest.getActionName();
@@ -136,11 +138,13 @@ public class Request {
     this.endTime = entity.getEndTime();
     this.requestContext = entity.getRequestContext();
     this.inputs = entity.getInputs();
-
+    this.targetService = entity.getTargetService();
+    this.targetComponent = entity.getTargetComponent();
+    this.targetHosts = entity.getTargetHosts();
     this.requestType = entity.getRequestType();
     this.commandName = entity.getCommandName();
     this.status = entity.getStatus();
-    if (entity.getRequestScheduleEntity() != null) {
+    if (entity.getRequestScheduleEntity() !=null) {
       this.requestScheduleId = entity.getRequestScheduleEntity().getScheduleId();
     }
 
@@ -148,28 +152,6 @@ public class Request {
       Stage stage = stageFactory.createExisting(stageEntity);
       stages.add(stage);
     }
-
-    for (RequestResourceFilterEntity resourceFilterEntity : entity.getResourceFilterEntities()) {
-      RequestResourceFilter resourceFilter =
-        new RequestResourceFilter(
-            resourceFilterEntity.getServiceName(),
-            resourceFilterEntity.getComponentName(),
-            getHostsList(resourceFilterEntity.getHosts()));
-      this.resourceFilters.add(resourceFilter);
-    }
-
-  }
-
-  private List<String> getHostsList(String hosts) {
-    List<String> hostList = new ArrayList<String>();
-    if (hosts != null && !hosts.isEmpty()) {
-      for (String host : hosts.split(",")) {
-        if (!host.trim().isEmpty()) {
-          hostList.add(host.trim());
-        }
-      }
-    }
-    return hostList;
   }
 
   public Collection<Stage> getStages() {
@@ -194,22 +176,13 @@ public class Request {
     requestEntity.setEndTime(endTime);
     requestEntity.setRequestContext(requestContext);
     requestEntity.setInputs(inputs);
+    requestEntity.setTargetService(targetService);
+    requestEntity.setTargetComponent(targetComponent);
+    requestEntity.setTargetHosts(targetHosts);
     requestEntity.setRequestType(requestType);
     requestEntity.setRequestScheduleId(requestScheduleId);
     //TODO set all fields
 
-    if (resourceFilters != null) {
-      List<RequestResourceFilterEntity> filterEntities = new ArrayList<RequestResourceFilterEntity>();
-      for (RequestResourceFilter resourceFilter : resourceFilters) {
-        RequestResourceFilterEntity filterEntity = new RequestResourceFilterEntity();
-        filterEntity.setServiceName(resourceFilter.getServiceName());
-        filterEntity.setComponentName(resourceFilter.getComponentName());
-        filterEntity.setRequestEntity(requestEntity);
-        filterEntity.setRequestId(requestId);
-      }
-      requestEntity.setResourceFilterEntities(filterEntities);
-    }
-
     return requestEntity;
   }
 
@@ -258,12 +231,28 @@ public class Request {
     this.inputs = inputs;
   }
 
-  public List<RequestResourceFilter> getResourceFilters() {
-    return resourceFilters;
+  public String getTargetService() {
+    return targetService;
+  }
+
+  public void setTargetService(String targetService) {
+    this.targetService = targetService;
+  }
+
+  public String getTargetComponent() {
+    return targetComponent;
+  }
+
+  public void setTargetComponent(String targetComponent) {
+    this.targetComponent = targetComponent;
+  }
+
+  public String getTargetHosts() {
+    return targetHosts;
   }
 
-  public void setResourceFilters(List<RequestResourceFilter> resourceFilters) {
-    this.resourceFilters = resourceFilters;
+  public void setTargetHosts(String targetHosts) {
+    this.targetHosts = targetHosts;
   }
 
   public RequestType getRequestType() {
@@ -309,7 +298,9 @@ public class Request {
         ", startTime=" + startTime +
         ", endTime=" + endTime +
         ", inputs='" + inputs + '\'' +
-        ", resourceFilters='" + resourceFilters + '\'' +
+        ", targetService='" + targetService + '\'' +
+        ", targetComponent='" + targetComponent + '\'' +
+        ", targetHosts='" + targetHosts + '\'' +
         ", requestType=" + requestType +
         ", stages=" + stages +
         '}';

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
index 37a404f..b59eff1 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
@@ -20,8 +20,8 @@
 package org.apache.ambari.server.controller;
 
 import org.apache.ambari.server.actionmanager.TargetHostType;
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
@@ -31,46 +31,33 @@ import java.util.Map;
 public class ActionExecutionContext {
   private final String clusterName;
   private final String actionName;
-  private List<RequestResourceFilter> resourceFilters;
-  private Map<String, String> parameters;
-  private TargetHostType targetType;
-  private Short timeout;
-  private String expectedServiceName;
-  private String expectedComponentName;
+  private final String serviceName;
+  private final String componentName;
+  private final String componentCategory;
+  private final List<String> hosts;
+  private final Map<String, String> parameters;
+  private final TargetHostType targetType;
+  private final Short timeout;
 
   /**
    * Create an ActionExecutionContext to execute an action from a request
    */
-  public ActionExecutionContext(String clusterName, String actionName,
-      List<RequestResourceFilter> resourceFilters,
-      Map<String, String> parameters, TargetHostType targetType,
-      Short timeout, String expectedServiceName,
-      String expectedComponentName) {
-
+  public ActionExecutionContext(String clusterName, String actionName, String serviceName,
+                                String componentName, String componentCategory,
+                                List<String> hosts, Map<String, String> parameters,
+                                TargetHostType targetType, Short timeout) {
     this.clusterName = clusterName;
     this.actionName = actionName;
-    this.resourceFilters = resourceFilters;
+    this.serviceName = serviceName;
+    this.componentName = componentName;
+    this.componentCategory = componentCategory;
     this.parameters = parameters;
+    this.hosts = new ArrayList<String>();
+    if (hosts != null) {
+      this.hosts.addAll(hosts);
+    }
     this.targetType = targetType;
     this.timeout = timeout;
-    this.expectedServiceName = expectedServiceName;
-    this.expectedComponentName = expectedComponentName;
-  }
-
-  public ActionExecutionContext(String clusterName, String actionName,
-                                List<RequestResourceFilter> resourceFilters) {
-    this.clusterName = clusterName;
-    this.actionName = actionName;
-    this.resourceFilters = resourceFilters;
-  }
-
-  public ActionExecutionContext(String clusterName, String commandName,
-                                List<RequestResourceFilter> resourceFilters,
-                                Map<String, String> parameters) {
-    this.clusterName = clusterName;
-    this.actionName = commandName;
-    this.resourceFilters = resourceFilters;
-    this.parameters = parameters;
   }
 
   public String getClusterName() {
@@ -81,39 +68,31 @@ public class ActionExecutionContext {
     return actionName;
   }
 
-  public Map<String, String> getParameters() {
-    return parameters;
+  public String getServiceName() {
+    return serviceName;
   }
 
-  public TargetHostType getTargetType() {
-    return targetType;
+  public String getComponentName() {
+    return componentName;
   }
 
-  public Short getTimeout() {
-    return timeout;
+  public Map<String, String> getParameters() {
+    return parameters;
   }
 
-  public List<RequestResourceFilter> getResourceFilters() {
-    return resourceFilters;
+  public List<String> getHosts() {
+    return hosts;
   }
 
-  public String getExpectedServiceName() {
-    return expectedServiceName;
+  public TargetHostType getTargetType() {
+    return targetType;
   }
 
-  public String getExpectedComponentName() {
-    return expectedComponentName;
+  public Short getTimeout() {
+    return timeout;
   }
 
-  @Override
-  public String toString() {
-    return "ActionExecutionContext{" +
-      "clusterName='" + clusterName + '\'' +
-      ", actionName='" + actionName + '\'' +
-      ", resourceFilters=" + resourceFilters +
-      ", parameters=" + parameters +
-      ", targetType=" + targetType +
-      ", timeout=" + timeout +
-      '}';
+  public String getComponentCategory() {
+    return componentCategory;
   }
 }
\ No newline at end of file

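The restored ActionExecutionContext takes the whole target description through
its nine-argument constructor. Below is a minimal usage sketch, assuming the
ambari-server classes in this diff are on the classpath; the cluster, action,
host, and parameter values are made up for the example.

// Sketch only: constructing the restored ActionExecutionContext with the
// constructor shown in the hunk above and reading the context back.
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import org.apache.ambari.server.actionmanager.TargetHostType;
import org.apache.ambari.server.controller.ActionExecutionContext;

public class ActionContextSketch {
  public static void main(String[] args) {
    Map<String, String> params = new HashMap<String, String>();
    params.put("threshold", "13");   // arbitrary action parameter for the example

    ActionExecutionContext context = new ActionExecutionContext(
        "c1",                         // clusterName
        "a1",                         // actionName
        "HDFS",                       // serviceName
        "DATANODE",                   // componentName
        "SLAVE",                      // componentCategory
        Arrays.asList("h1", "h2"),    // hosts (copied into a new list by the constructor)
        params,                       // parameters
        TargetHostType.ANY,           // targetType
        Short.valueOf((short) 60));   // timeout (taken from the action definition in the real flow)

    System.out.println(context.getHosts() + " -> " + context.getTargetType());
  }
}
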
http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
index 85f9078..74aa1fd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
@@ -18,8 +18,16 @@
 
 package org.apache.ambari.server.controller;
 
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCHEMA_VERSION;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
+
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.RoleCommand;
@@ -29,7 +37,7 @@ import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.actionmanager.TargetHostType;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.customactions.ActionDefinition;
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.state.Cluster;
@@ -43,51 +51,45 @@ import org.apache.ambari.server.utils.StageUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCHEMA_VERSION;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
 
 /**
  * Helper class containing logic to process custom action execution requests
  */
-@Singleton
 public class AmbariActionExecutionHelper {
   private final static Logger LOG =
       LoggerFactory.getLogger(AmbariActionExecutionHelper.class);
   private static final String TYPE_PYTHON = "PYTHON";
-
-  @Inject
   private ActionMetadata actionMetadata;
-  @Inject
   private Clusters clusters;
-  @Inject
-  private AmbariManagementController managementController;
-  @Inject
+  private AmbariManagementControllerImpl amcImpl;
   private ActionManager actionManager;
-  @Inject
   private AmbariMetaInfo ambariMetaInfo;
 
+  public AmbariActionExecutionHelper(ActionMetadata actionMetadata, Clusters clusters,
+                                     AmbariManagementControllerImpl amcImpl) {
+    this.amcImpl = amcImpl;
+    this.actionMetadata = actionMetadata;
+    this.clusters = clusters;
+    this.actionManager = amcImpl.getActionManager();
+    this.ambariMetaInfo = amcImpl.getAmbariMetaInfo();
+  }
+
+  // TODO: validate should not return context, should make it consistent with Command Execution helper
   /**
-   * Validates the request to execute an action.
+   * Validates the request to execute an action
+   *
    * @param actionRequest
+   * @param cluster
+   * @return
    * @throws AmbariException
    */
-  public void validateAction(ExecuteActionRequest actionRequest) throws AmbariException {
-    Cluster cluster = clusters.getCluster(actionRequest.getClusterName());
-
-    if (cluster == null) {
-      throw new AmbariException("Unable to find cluster. clusterName = " +
-        actionRequest.getClusterName());
-    }
-
+  public ActionExecutionContext validateCustomAction(ExecuteActionRequest actionRequest, Cluster cluster)
+      throws AmbariException {
     if (actionRequest.getActionName() == null || actionRequest.getActionName().isEmpty()) {
       throw new AmbariException("Action name must be specified");
     }
@@ -97,214 +99,196 @@ public class AmbariActionExecutionHelper {
       throw new AmbariException("Action " + actionRequest.getActionName() + " does not exist");
     }
 
-    if (actionDef.getInputs() != null) {
-      String[] inputs = actionDef.getInputs().split(",");
-      for (String input : inputs) {
-        String inputName = input.trim();
-        if (!inputName.isEmpty()) {
-          boolean mandatory = true;
-          if (inputName.startsWith("[") && inputName.endsWith("]")) {
-            mandatory = false;
-          }
-          if (mandatory && !actionRequest.getParameters().containsKey(inputName)) {
-            throw new AmbariException("Action " + actionRequest.getActionName() + " requires input '" +
-              input.trim() + "' that is not provided.");
-          }
-        }
-      }
-    }
-
-
     StackId stackId = cluster.getCurrentStackVersion();
     String expectedService = actionDef.getTargetService() == null ? "" : actionDef.getTargetService();
-
-    List<RequestResourceFilter> resourceFilters = actionRequest.getResourceFilters();
-    String targetService = "";
-    String targetComponent = "";
-    RequestResourceFilter resourceFilter = null;
-
-    if (resourceFilters != null && !resourceFilters.isEmpty()) {
-      if (resourceFilters.size() > 1) {
-        throw new AmbariException("Custom action definition only allows one " +
-          "resource filter to be specified.");
-      }
-
-      resourceFilter = resourceFilters.get(0);
-      String actualService = resourceFilter.getServiceName() == null ? "" : resourceFilter.getServiceName();
-      if (!expectedService.isEmpty() && !actualService.isEmpty() && !expectedService.equals(actualService)) {
-        throw new AmbariException("Action " + actionRequest.getActionName() + " targets service " + actualService +
+    String actualService = actionRequest.getServiceName() == null ? "" : actionRequest.getServiceName();
+    if (!expectedService.isEmpty() && !actualService.isEmpty() && !expectedService.equals(actualService)) {
+      throw new AmbariException("Action " + actionRequest.getActionName() + " targets service " + actualService +
           " that does not match with expected " + expectedService);
-      }
+    }
 
-      targetService = expectedService;
-      if (targetService == null || targetService.isEmpty()) {
-        targetService = actualService;
-      }
+    String targetService = expectedService;
+    if (targetService == null || targetService.isEmpty()) {
+      targetService = actualService;
+    }
 
-      if (targetService != null && !targetService.isEmpty()) {
-        ServiceInfo serviceInfo;
-        try {
-          serviceInfo = ambariMetaInfo.getService(stackId.getStackName(), stackId.getStackVersion(),
+    if (targetService != null && !targetService.isEmpty()) {
+      ServiceInfo serviceInfo;
+      try {
+        serviceInfo = ambariMetaInfo.getService(stackId.getStackName(), stackId.getStackVersion(),
             targetService);
-        } catch (StackAccessException se) {
-          serviceInfo = null;
-        }
+      } catch (StackAccessException se) {
+        serviceInfo = null;
+      }
 
-        if (serviceInfo == null) {
-          throw new AmbariException("Action " + actionRequest.getActionName() +
-            " targets service " + targetService + " that does not exist.");
-        }
+      if (serviceInfo == null) {
+        throw new AmbariException("Action " + actionRequest.getActionName() + " targets service " + targetService +
+            " that does not exist.");
       }
+    }
 
-      String expectedComponent = actionDef.getTargetComponent() == null ? "" : actionDef.getTargetComponent();
-      String actualComponent = resourceFilter.getComponentName() == null ? "" : resourceFilter.getComponentName();
-      if (!expectedComponent.isEmpty() && !actualComponent.isEmpty() && !expectedComponent.equals(actualComponent)) {
-        throw new AmbariException("Action " + actionRequest.getActionName() + " targets component " + actualComponent +
+    String expectedComponent = actionDef.getTargetComponent() == null ? "" : actionDef.getTargetComponent();
+    String actualComponent = actionRequest.getComponentName() == null ? "" : actionRequest.getComponentName();
+    if (!expectedComponent.isEmpty() && !actualComponent.isEmpty() && !expectedComponent.equals(actualComponent)) {
+      throw new AmbariException("Action " + actionRequest.getActionName() + " targets component " + actualComponent +
           " that does not match with expected " + expectedComponent);
-      }
+    }
 
-      targetComponent = expectedComponent;
-      if (targetComponent == null || targetComponent.isEmpty()) {
-        targetComponent = actualComponent;
-      }
+    String targetComponent = expectedComponent;
+    String componentCategory = "";
+    if (targetComponent == null || targetComponent.isEmpty()) {
+      targetComponent = actualComponent;
+    }
 
-      if (!targetComponent.isEmpty() && targetService.isEmpty()) {
-        throw new AmbariException("Action " + actionRequest.getActionName() + " targets component " + targetComponent +
+    if (!targetComponent.isEmpty() && targetService.isEmpty()) {
+      throw new AmbariException("Action " + actionRequest.getActionName() + " targets component " + targetComponent +
           " without specifying the target service.");
-      }
+    }
 
-      if (targetComponent != null && !targetComponent.isEmpty()) {
-        ComponentInfo compInfo;
-        try {
-          compInfo = ambariMetaInfo.getComponent(stackId.getStackName(), stackId.getStackVersion(),
+    if (targetComponent != null && !targetComponent.isEmpty()) {
+      ComponentInfo compInfo;
+      try {
+        compInfo = ambariMetaInfo.getComponent(stackId.getStackName(), stackId.getStackVersion(),
             targetService, targetComponent);
-        } catch (StackAccessException se) {
-          compInfo = null;
-        }
+      } catch (StackAccessException se) {
+        compInfo = null;
+      }
 
-        if (compInfo == null) {
-          throw new AmbariException("Action " + actionRequest.getActionName() + " targets component " + targetComponent +
+      if (compInfo == null) {
+        throw new AmbariException("Action " + actionRequest.getActionName() + " targets component " + targetComponent +
             " that does not exist.");
+      }
+      componentCategory = compInfo.getCategory();
+    }
+
+    if (actionDef.getInputs() != null) {
+      String[] inputs = actionDef.getInputs().split(",");
+      for (String input : inputs) {
+        String inputName = input.trim();
+        if (!inputName.isEmpty()) {
+          boolean mandatory = true;
+          if (inputName.startsWith("[") && inputName.endsWith("]")) {
+            mandatory = false;
+          }
+          if (mandatory && !actionRequest.getParameters().containsKey(inputName)) {
+            throw new AmbariException("Action " + actionRequest.getActionName() + " requires input '" +
+                input.trim() + "' that is not provided.");
+          }
         }
       }
     }
 
-    if (TargetHostType.SPECIFIC.equals(actionDef.getTargetType())
-      || (targetService.isEmpty() && targetComponent.isEmpty())) {
-      if (resourceFilter == null || resourceFilter.getHostNames().size() == 0) {
+    if (TargetHostType.SPECIFIC.name().equals(actionDef.getTargetType())
+        || (targetService.isEmpty() && targetComponent.isEmpty())) {
+      if (actionRequest.getHosts().size() == 0) {
         throw new AmbariException("Action " + actionRequest.getActionName() + " requires explicit target host(s)" +
-          " that is not provided.");
+            " that is not provided.");
       }
     }
-  }
 
+    LOG.info("Received action execution request"
+        + ", clusterName=" + actionRequest.getClusterName()
+        + ", request=" + actionRequest.toString());
+
+    ActionExecutionContext actionExecutionContext = new ActionExecutionContext(
+      actionRequest.getClusterName(), actionRequest.getActionName(),
+      targetService, targetComponent, componentCategory, actionRequest.getHosts(),
+      actionRequest.getParameters(), actionDef.getTargetType(), actionDef.getDefaultTimeout());
+
+    return actionExecutionContext;
+  }
 
   /**
    * Add tasks to the stage based on the requested action execution
-   * @param actionContext the context associated with the action
-   * @param stage stage into which tasks must be inserted
-   * @param hostLevelParams host level params to send with the command
+   *
+   * @param actionContext   the context associated with the action
+   * @param stage           stage into which tasks must be inserted
+   * @param configuration
+   * @param hostsMap
+   * @param hostLevelParams
    * @throws AmbariException
    */
-  public void addExecutionCommandsToStage(ActionExecutionContext
-    actionContext, Stage stage, Map<String, String> hostLevelParams)
+  public void addAction(ActionExecutionContext actionContext, Stage stage,
+                        Configuration configuration, HostsMap hostsMap, Map<String, String> hostLevelParams)
       throws AmbariException {
-
     String actionName = actionContext.getActionName();
     String clusterName = actionContext.getClusterName();
-    Cluster cluster = clusters.getCluster(clusterName);
-
-    List<RequestResourceFilter> resourceFilters = actionContext.getResourceFilters();
-
-    RequestResourceFilter resourceFilter = new RequestResourceFilter();
-    if (resourceFilters != null && !resourceFilters.isEmpty()) {
-      resourceFilter = resourceFilters.get(0);
-    }
+    String serviceName = actionContext.getServiceName();
+    String componentName = actionContext.getComponentName();
 
     // List of host to select from
     Set<String> candidateHosts = new HashSet<String>();
-
-    String serviceName = actionContext.getExpectedServiceName();
-    String componentName = actionContext.getExpectedComponentName();
-    StackId stackId = cluster.getCurrentStackVersion();
-    ComponentInfo componentInfo = null;
-
-    if (serviceName != null && !serviceName.isEmpty()) {
-      if (componentName != null && !componentName.isEmpty()) {
+    if (!serviceName.isEmpty()) {
+      if (!componentName.isEmpty()) {
         Map<String, ServiceComponentHost> componentHosts =
-          cluster.getService(serviceName)
-            .getServiceComponent(componentName).getServiceComponentHosts();
+            clusters.getCluster(clusterName).getService(serviceName)
+                .getServiceComponent(componentName).getServiceComponentHosts();
         candidateHosts.addAll(componentHosts.keySet());
-        componentInfo = ambariMetaInfo.getComponentCategory(stackId.getStackName(),
-          stackId.getStackVersion(), serviceName, componentName);
       } else {
-        for (String component : cluster.getService(serviceName).getServiceComponents().keySet()) {
+        for (String component : clusters.getCluster(clusterName).getService(serviceName)
+            .getServiceComponents().keySet()) {
           Map<String, ServiceComponentHost> componentHosts =
-            cluster.getService(serviceName)
-              .getServiceComponent(component).getServiceComponentHosts();
+              clusters.getCluster(clusterName).getService(serviceName)
+                  .getServiceComponent(component).getServiceComponentHosts();
           candidateHosts.addAll(componentHosts.keySet());
         }
       }
     } else {
       // All hosts are valid target host
-      candidateHosts.addAll(clusters.getHostsForCluster(cluster.getClusterName()).keySet());
+      candidateHosts.addAll(amcImpl.getClusters().getHostsForCluster(clusterName).keySet());
     }
 
     // If request did not specify hosts and there exists no host
-    if (resourceFilter.getHostNames().isEmpty() && candidateHosts.isEmpty()) {
+    if (actionContext.getHosts().isEmpty() && candidateHosts.isEmpty()) {
       throw new AmbariException("Suitable hosts not found, component="
-        + componentName + ", service=" + serviceName
-        + ", cluster=" + cluster.getClusterName() + ", " +
-        "actionName=" + actionContext.getActionName());
+          + componentName + ", service=" + serviceName
+          + ", cluster=" + clusterName + ", actionName=" + actionName);
     }
 
     // Compare specified hosts to available hosts
-    if (!resourceFilter.getHostNames().isEmpty() && !candidateHosts.isEmpty()) {
-      for (String hostname : resourceFilter.getHostNames()) {
+    if (!actionContext.getHosts().isEmpty() && !candidateHosts.isEmpty()) {
+      for (String hostname : actionContext.getHosts()) {
         if (!candidateHosts.contains(hostname)) {
-          throw new AmbariException("Request specifies host " + hostname +
-            " but its not a valid host based on the " +
-            "target service=" + serviceName + " and component=" + componentName);
+          throw new AmbariException("Request specifies host " + hostname + " but its not a valid host based on the " +
+              "target service=" + serviceName + " and component=" + componentName);
         }
       }
     }
 
-    List<String> targetHosts = resourceFilter.getHostNames();
-
     //Find target hosts to execute
-    if (targetHosts.isEmpty()) {
+    if (actionContext.getHosts().isEmpty()) {
       TargetHostType hostType = actionContext.getTargetType();
       switch (hostType) {
         case ALL:
-          targetHosts.addAll(candidateHosts);
+          actionContext.getHosts().addAll(candidateHosts);
           break;
         case ANY:
-          targetHosts.add(managementController.getHealthyHost(candidateHosts));
+          actionContext.getHosts().add(amcImpl.getHealthyHost(candidateHosts));
           break;
         case MAJORITY:
           for (int i = 0; i < (candidateHosts.size() / 2) + 1; i++) {
-            String hostname = managementController.getHealthyHost(candidateHosts);
-            targetHosts.add(hostname);
+            String hostname = amcImpl.getHealthyHost(candidateHosts);
+            actionContext.getHosts().add(hostname);
             candidateHosts.remove(hostname);
           }
           break;
         default:
-          throw new AmbariException("Unsupported target type = " + hostType);
+          throw new AmbariException("Unsupported target type=" + hostType);
       }
     }
 
     //create tasks for each host
-    for (String hostName : targetHosts) {
-      stage.addHostRoleExecutionCommand(hostName,
-        Role.valueOf(actionContext.getActionName()), RoleCommand.ACTIONEXECUTE,
-          new ServiceComponentHostOpInProgressEvent(actionContext.getActionName(),
-            hostName, System.currentTimeMillis()), clusterName,
-              serviceName);
+    for (String hostName : actionContext.getHosts()) {
+      stage.addHostRoleExecutionCommand(hostName, Role.valueOf(actionContext.getActionName()), RoleCommand.ACTIONEXECUTE,
+          new ServiceComponentHostOpInProgressEvent(actionContext.getActionName(), hostName,
+              System.currentTimeMillis()), clusterName, actionContext.getServiceName());
+
+      Cluster cluster = clusters.getCluster(clusterName);
 
       Map<String, Map<String, String>> configurations = new TreeMap<String, Map<String, String>>();
       Map<String, Map<String, String>> configTags = null;
-      if (!serviceName.isEmpty()) {
-        configTags = managementController.findConfigurationTagsWithOverrides(cluster, hostName);
+      if (!actionContext.getServiceName().isEmpty()) {
+        configTags = amcImpl.findConfigurationTagsWithOverrides(cluster, hostName);
       }
 
       Map<String, String> commandParams = actionContext.getParameters();
@@ -314,7 +298,7 @@ public class AmbariActionExecutionHelper {
       commandParams.put(SCHEMA_VERSION, AmbariMetaInfo.SCHEMA_VERSION_2);
 
       ExecutionCommand execCmd = stage.getExecutionCommandWrapper(hostName,
-        actionContext.getActionName()).getExecutionCommand();
+          actionContext.getActionName()).getExecutionCommand();
 
       /*
        * TODO Execution command field population should be (partially?)
@@ -324,27 +308,25 @@ public class AmbariActionExecutionHelper {
       execCmd.setConfigurationTags(configTags);
       execCmd.setHostLevelParams(hostLevelParams);
       execCmd.setCommandParams(commandParams);
-      execCmd.setServiceName(serviceName == null || serviceName.isEmpty() ?
-        resourceFilter.getServiceName() : serviceName);
-      execCmd.setComponentName(componentName == null || componentName.isEmpty() ?
-        resourceFilter.getComponentName() : componentName);
+      execCmd.setServiceName(serviceName);
+      execCmd.setComponentName(componentName);
 
       Map<String, String> roleParams = execCmd.getRoleParams();
       if (roleParams == null) {
         roleParams = new TreeMap<String, String>();
       }
       roleParams.putAll(actionContext.getParameters());
-      if (componentInfo != null) {
-        roleParams.put(COMPONENT_CATEGORY, componentInfo.getCategory());
-      }
+      roleParams.put(COMPONENT_CATEGORY, actionContext.getComponentCategory());
       execCmd.setRoleParams(roleParams);
 
       // Generate cluster host info
       execCmd.setClusterHostInfo(
-        StageUtils.getClusterHostInfo(clusters.getHostsForCluster(clusterName), cluster));
-
+          StageUtils.getClusterHostInfo(clusters.getHostsForCluster(clusterName), cluster));
+      
       // cluster passive map
-      execCmd.setPassiveInfo(MaintenanceStateHelper.getMaintenanceHostCompoments(clusters, cluster));
+      execCmd.setPassiveInfo(
+          MaintenanceStateHelper.getMaintenanceHostCompoments(clusters, cluster));
+          
     }
   }
 }

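The addAction method above selects target hosts with the ALL/ANY/MAJORITY
switch when the request names none. The standalone sketch below restates just
that selection logic; pickHealthyHost is a stand-in for the controller's
getHealthyHost (here it simply takes the first candidate), and the local
TargetHostType enum is a simplification of the real
org.apache.ambari.server.actionmanager.TargetHostType.

// Simplified restatement of the target-host selection in addAction:
// ALL takes every candidate, ANY takes one healthy host, MAJORITY takes
// (n / 2) + 1 healthy hosts, removing each one as it is chosen.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class TargetHostSelectionSketch {
  enum TargetHostType { ALL, ANY, MAJORITY }   // SPECIFIC omitted; it requires explicit hosts

  // Stand-in for the controller's health check; always picks the first candidate.
  static String pickHealthyHost(Set<String> candidates) {
    return candidates.iterator().next();
  }

  static List<String> selectTargets(TargetHostType type, Set<String> candidateHosts) {
    List<String> targets = new ArrayList<String>();
    switch (type) {
      case ALL:
        targets.addAll(candidateHosts);
        break;
      case ANY:
        targets.add(pickHealthyHost(candidateHosts));
        break;
      case MAJORITY:
        for (int i = 0; i < (candidateHosts.size() / 2) + 1; i++) {
          String host = pickHealthyHost(candidateHosts);
          targets.add(host);
          candidateHosts.remove(host);
        }
        break;
    }
    return targets;
  }

  public static void main(String[] args) {
    Set<String> candidates = new HashSet<String>(Arrays.asList("h1", "h2", "h3"));
    System.out.println(selectTargets(TargetHostType.MAJORITY, candidates));
  }
}
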
http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index abd82ac..ba42cc5 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -29,7 +29,6 @@ import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -43,7 +42,9 @@ import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
+import org.apache.ambari.server.state.ServiceComponentHostEvent;
 import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.ServiceOsSpecific;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
@@ -52,6 +53,7 @@ import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -60,15 +62,29 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
+
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CUSTOM_COMMAND;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_NAME;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_HOME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JCE_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.MYSQL_JDBC_URL;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.ORACLE_JDBC_URL;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.PACKAGE_LIST;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCHEMA_VERSION;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_REPO_INFO;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
+
 
 /**
  * Helper class containing logic to process custom command execution requests .
@@ -100,7 +116,7 @@ public class AmbariCustomCommandExecutionHelper {
   @Inject
   private Clusters clusters;
   @Inject
-  private AmbariManagementController managementController;
+  private AmbariManagementController amc;
   @Inject
   private Gson gson;
   @Inject
@@ -110,10 +126,6 @@ public class AmbariCustomCommandExecutionHelper {
   @Inject
   private ConfigHelper configHelper;
 
-  protected static final String SERVICE_CHECK_COMMAND_NAME = "SERVICE_CHECK";
-  protected static final String DECOMMISSION_COMMAND_NAME = "DECOMMISSION";
-
-
   private Boolean isServiceCheckCommand(String command, String service) {
     List<String> actions = actionMetadata.getActions(service);
     if (actions == null || actions.size() == 0) {
@@ -127,98 +139,114 @@ public class AmbariCustomCommandExecutionHelper {
     return true;
   }
 
-  private Boolean isValidCustomCommand(String clusterName,
-      String serviceName, String componentName, String commandName)
-      throws AmbariException {
-
-    Cluster cluster = clusters.getCluster(clusterName);
+  private Boolean isValidCustomCommand(ExecuteActionRequest actionRequest) throws AmbariException {
+    String clustername = actionRequest.getClusterName();
+    Cluster cluster = clusters.getCluster(clustername);
     StackId stackId = cluster.getDesiredStackVersion();
+    String serviceName = actionRequest.getServiceName();
+    String componentName = actionRequest.getComponentName();
+    String commandName = actionRequest.getCommandName();
 
     if (componentName == null) {
       return false;
     }
     ComponentInfo componentInfo = ambariMetaInfo.getComponent(
-      stackId.getStackName(), stackId.getStackVersion(),
-      serviceName, componentName);
-
-    return !(!componentInfo.isCustomCommand(commandName) &&
-      !actionMetadata.isDefaultHostComponentCommand(commandName));
-  }
-
-  private Boolean isValidCustomCommand(ActionExecutionContext
-      actionExecutionContext, RequestResourceFilter resourceFilter)
-      throws AmbariException {
-    String clusterName = actionExecutionContext.getClusterName();
-    String serviceName = resourceFilter.getServiceName();
-    String componentName = resourceFilter.getComponentName();
-    String commandName = actionExecutionContext.getActionName();
+        stackId.getStackName(), stackId.getStackVersion(),
+        serviceName, componentName);
 
-    if (componentName == null) {
+    if (!componentInfo.isCustomCommand(commandName) &&
+        !actionMetadata.isDefaultHostComponentCommand(commandName)) {
       return false;
     }
-
-    return isValidCustomCommand(clusterName, serviceName, componentName, commandName);
+    return true;
   }
 
-  private Boolean isValidCustomCommand(ExecuteActionRequest actionRequest,
-      RequestResourceFilter resourceFilter) throws AmbariException {
-    String clusterName = actionRequest.getClusterName();
-    String serviceName = resourceFilter.getServiceName();
-    String componentName = resourceFilter.getComponentName();
-    String commandName = actionRequest.getCommandName();
+  public void validateCustomCommand(ExecuteActionRequest actionRequest) throws AmbariException {
+    if (actionRequest.getServiceName() == null
+        || actionRequest.getServiceName().isEmpty()
+        || actionRequest.getCommandName() == null
+        || actionRequest.getCommandName().isEmpty()) {
+      throw new AmbariException("Invalid request : " + "cluster="
+          + actionRequest.getClusterName() + ", service="
+          + actionRequest.getServiceName() + ", command="
+          + actionRequest.getCommandName());
+    }
 
-    if (componentName == null) {
-      return false;
+    LOG.info("Received a command execution request"
+        + ", clusterName=" + actionRequest.getClusterName()
+        + ", serviceName=" + actionRequest.getServiceName()
+        + ", request=" + actionRequest.toString());
+
+    if (!isServiceCheckCommand(actionRequest.getCommandName(), actionRequest.getServiceName())
+        && !isValidCustomCommand(actionRequest)) {
+      throw new AmbariException(
+          "Unsupported action " + actionRequest.getCommandName() + " for Service: " + actionRequest.getServiceName()
+              + " and Component: " + actionRequest.getComponentName());
     }
+  }
 
-    return isValidCustomCommand(clusterName, serviceName, componentName, commandName);
+  /**
+   * Other than Service_Check and Decommission all other commands are pass-through
+   *
+   * @param actionRequest   received request to execute a command
+   * @param stage           the initial stage for task creation
+   * @param hostLevelParams specific parameters for the hosts
+   * @throws AmbariException
+   */
+  public void addAction(ExecuteActionRequest actionRequest, Stage stage,
+                        Map<String, String> hostLevelParams)
+      throws AmbariException {
+    if (actionRequest.getCommandName().contains("SERVICE_CHECK")) {
+      findHostAndAddServiceCheckAction(actionRequest, stage, hostLevelParams);
+    } else if (actionRequest.getCommandName().equals("DECOMMISSION")) {
+      addDecommissionAction(actionRequest, stage, hostLevelParams);
+    } else if (isValidCustomCommand(actionRequest)) {
+      String commandDetail = getReadableCustomCommandDetail(actionRequest);
+      addCustomCommandAction(actionRequest, stage, hostLevelParams, null, commandDetail);
+    } else {
+      throw new AmbariException("Unsupported action " + actionRequest.getCommandName());
+    }
   }
 
-  private String getReadableCustomCommandDetail(ActionExecutionContext
-        actionRequest, RequestResourceFilter resourceFilter) {
-    StringBuilder sb = new StringBuilder();
-    sb.append(actionRequest.getActionName());
-    if (resourceFilter.getServiceName() != null
-        && !resourceFilter.getServiceName().equals("")) {
-      sb.append(" ");
-      sb.append(resourceFilter.getServiceName());
+  private String getReadableCustomCommandDetail(ExecuteActionRequest actionRequest) {
+    StringBuffer sb = new StringBuffer();
+    sb.append(actionRequest.getCommandName());
+    if (actionRequest.getServiceName() != null && !actionRequest.getServiceName().equals("")) {
+      sb.append(" " + actionRequest.getServiceName());
     }
-    if (resourceFilter.getComponentName() != null
-        && !resourceFilter.getComponentName().equals("")) {
-      sb.append("/");
-      sb.append(resourceFilter.getComponentName());
+    if (actionRequest.getComponentName() != null && !actionRequest.getComponentName().equals("")) {
+      sb.append("/" + actionRequest.getComponentName());
     }
     return sb.toString();
   }
 
-  private void addCustomCommandAction(ActionExecutionContext actionExecutionContext,
-                                      RequestResourceFilter resourceFilter,
+  private void addCustomCommandAction(ExecuteActionRequest actionRequest,
                                       Stage stage, Map<String, String> hostLevelParams,
                                       Map<String, String> additionalCommandParams,
                                       String commandDetail)
-                                      throws AmbariException {
+      throws AmbariException {
 
-    List<String> hosts = resourceFilter.getHostNames();
-    if (hosts.isEmpty()) {
+    if (actionRequest.getHosts().isEmpty()) {
       throw new AmbariException("Invalid request : No hosts specified.");
     }
 
-    String serviceName = resourceFilter.getServiceName();
-    String componentName = resourceFilter.getComponentName();
-    String commandName = actionExecutionContext.getActionName();
+    String serviceName = actionRequest.getServiceName();
+    String componentName = actionRequest.getComponentName();
+    String commandName = actionRequest.getCommandName();
 
     String clusterName = stage.getClusterName();
     Cluster cluster = clusters.getCluster(clusterName);
     StackId stackId = cluster.getDesiredStackVersion();
-    AmbariMetaInfo ambariMetaInfo = managementController.getAmbariMetaInfo();
-    ServiceInfo serviceInfo = ambariMetaInfo.getServiceInfo
-      (stackId.getStackName(), stackId.getStackVersion(), serviceName);
-    StackInfo stackInfo = ambariMetaInfo.getStackInfo
-      (stackId.getStackName(), stackId.getStackVersion());
+    AmbariMetaInfo ambariMetaInfo = amc.getAmbariMetaInfo();
+    ServiceInfo serviceInfo =
+        ambariMetaInfo.getServiceInfo(stackId.getStackName(),
+            stackId.getStackVersion(), serviceName);
+    StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+        stackId.getStackVersion());
 
     long nowTimestamp = System.currentTimeMillis();
 
-    for (String hostName : hosts) {
+    for (String hostName : actionRequest.getHosts()) {
 
       Host host = clusters.getHost(hostName);
 
@@ -230,12 +258,12 @@ public class AmbariCustomCommandExecutionHelper {
       Map<String, Map<String, String>> configurations =
           new TreeMap<String, Map<String, String>>();
       Map<String, Map<String, String>> configTags =
-          managementController.findConfigurationTagsWithOverrides(cluster, hostName);
+          amc.findConfigurationTagsWithOverrides(cluster, hostName);
 
       HostRoleCommand cmd = stage.getHostRoleCommand(hostName, componentName);
       if (cmd != null) {
         cmd.setCommandDetail(commandDetail);
-        cmd.setCustomCommandName(commandName);
+        cmd.setCustomCommandName(actionRequest.getCommandName());
       }
 
       ExecutionCommand execCmd = stage.getExecutionCommandWrapper(hostName,
@@ -249,8 +277,7 @@ public class AmbariCustomCommandExecutionHelper {
 
       hostLevelParams.put(CUSTOM_COMMAND, commandName);
       // Set parameters required for re-installing clients on restart
-      hostLevelParams.put(REPO_INFO, getRepoInfo
-        (cluster, host));
+      hostLevelParams.put(REPO_INFO, getRepoInfo(cluster, host));
       execCmd.setHostLevelParams(hostLevelParams);
 
       Map<String, String> commandParams = new TreeMap<String, String>();
@@ -302,50 +329,37 @@ public class AmbariCustomCommandExecutionHelper {
     }
   }
 
-  private void findHostAndAddServiceCheckAction(ActionExecutionContext
-      actionExecutionContext, RequestResourceFilter resourceFilter,
-      Stage stage, Map<String, String> hostLevelParams)
+  private void findHostAndAddServiceCheckAction(ExecuteActionRequest actionRequest, Stage stage,
+                                                Map<String, String> hostLevelParams)
       throws AmbariException {
-
-    String clusterName = actionExecutionContext.getClusterName();
-    String componentName = actionMetadata.getClient(resourceFilter.getServiceName());
-    String serviceName = resourceFilter.getServiceName();
-    String smokeTestRole = actionExecutionContext.getActionName();
+    String clusterName = actionRequest.getClusterName();
+    String componentName = actionMetadata.getClient(actionRequest
+        .getServiceName());
+    String serviceName = actionRequest.getServiceName();
+    String smokeTestRole = actionRequest.getCommandName();
     long nowTimestamp = System.currentTimeMillis();
-    Map<String, String> actionParameters = actionExecutionContext.getParameters();
+    Map<String, String> actionParameters = actionRequest.getParameters();
 
     String hostName;
     if (componentName != null) {
-      Map<String, ServiceComponentHost> components =
-        clusters.getCluster(clusterName).getService(serviceName)
+      Map<String, ServiceComponentHost> components = clusters
+          .getCluster(clusterName).getService(actionRequest.getServiceName())
           .getServiceComponent(componentName).getServiceComponentHosts();
 
       if (components.isEmpty()) {
         throw new AmbariException("Hosts not found, component="
-            + componentName + ", service = " + serviceName
-            + ", cluster = " + clusterName);
-      }
-
-      List<String> candidateHosts = resourceFilter.getHostNames();
-      if (candidateHosts != null && !candidateHosts.isEmpty()) {
-        hostName = managementController.getHealthyHost
-          (new HashSet<String>(candidateHosts));
-
-        if (hostName == null) {
-          LOG.info("Unable to find a healthy host amongst the provided set of " +
-            "hosts. " + candidateHosts);
-        }
-      } else {
-        hostName = managementController.getHealthyHost(components.keySet());
+            + componentName + ", service=" + actionRequest.getServiceName()
+            + ", cluster=" + clusterName);
       }
-
+      hostName = amc.getHealthyHost(components.keySet());
     } else {
       Map<String, ServiceComponent> components = clusters
-        .getCluster(clusterName).getService(serviceName).getServiceComponents();
+          .getCluster(clusterName).getService(actionRequest.getServiceName())
+          .getServiceComponents();
 
       if (components.isEmpty()) {
-        throw new AmbariException("Components not found, service = "
-            + serviceName + ", cluster = " + clusterName);
+        throw new AmbariException("Components not found, service="
+            + actionRequest.getServiceName() + ", cluster=" + clusterName);
       }
 
       ServiceComponent serviceComponent = components.values().iterator()
@@ -353,11 +367,12 @@ public class AmbariCustomCommandExecutionHelper {
 
       if (serviceComponent.getServiceComponentHosts().isEmpty()) {
         throw new AmbariException("Hosts not found, component="
-            + serviceComponent.getName() + ", service = "
-            + serviceName + ", cluster = " + clusterName);
+            + serviceComponent.getName() + ", service="
+            + actionRequest.getServiceName() + ", cluster=" + clusterName);
       }
 
-      hostName = serviceComponent.getServiceComponentHosts().keySet().iterator().next();
+      hostName = serviceComponent.getServiceComponentHosts().keySet()
+          .iterator().next();
     }
 
     addServiceCheckAction(stage, hostName, smokeTestRole, nowTimestamp,
@@ -377,17 +392,17 @@ public class AmbariCustomCommandExecutionHelper {
                                     String componentName,
                                     Map<String, String> actionParameters,
                                     Map<String, String> hostLevelParams)
-                                    throws AmbariException {
+      throws AmbariException {
 
     String clusterName = stage.getClusterName();
     Cluster cluster = clusters.getCluster(clusterName);
     StackId stackId = cluster.getDesiredStackVersion();
-    AmbariMetaInfo ambariMetaInfo = managementController.getAmbariMetaInfo();
+    AmbariMetaInfo ambariMetaInfo = amc.getAmbariMetaInfo();
     ServiceInfo serviceInfo =
         ambariMetaInfo.getServiceInfo(stackId.getStackName(),
             stackId.getStackVersion(), serviceName);
     StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
-      stackId.getStackVersion());
+        stackId.getStackVersion());
 
 
     stage.addHostRoleExecutionCommand(hostname,
@@ -404,7 +419,7 @@ public class AmbariCustomCommandExecutionHelper {
     Map<String, Map<String, String>> configurations =
         new TreeMap<String, Map<String, String>>();
     Map<String, Map<String, String>> configTags =
-        managementController.findConfigurationTagsWithOverrides(cluster, hostname);
+        amc.findConfigurationTagsWithOverrides(cluster, hostname);
 
     ExecutionCommand execCmd = stage.getExecutionCommandWrapper(hostname,
         smokeTestRole).getExecutionCommand();
@@ -474,53 +489,46 @@ public class AmbariCustomCommandExecutionHelper {
    * Processes decommission command. Modifies the host components as needed and then
    * calls into the implementation of a custom command
    */
-  private void addDecommissionAction(ActionExecutionContext actionExecutionContext,
-                                     RequestResourceFilter resourceFilter,
-                                     Stage stage, Map<String, String> hostLevelParams)
-                                     throws AmbariException {
+  private void addDecommissionAction(ExecuteActionRequest request, Stage stage,
+                                     Map<String, String> hostLevelParams)
+      throws AmbariException {
 
-    String clusterName = actionExecutionContext.getClusterName();
+    String clusterName = request.getClusterName();
     Cluster cluster = clusters.getCluster(clusterName);
-    String serviceName = resourceFilter.getServiceName();
-    String componentName = resourceFilter.getComponentName();
-    List<String> hosts = resourceFilter.getHostNames();
+    String serviceName = request.getServiceName();
 
-    if (hosts != null && !hosts.isEmpty()) {
-      throw new AmbariException("Decommission command cannot be issued with " +
-        "target host(s) specified.");
+    if (request.getHosts() != null && request.getHosts().size() != 0) {
+      throw new AmbariException("Decommission command cannot be issued with target host(s) specified.");
     }
 
     //Get all hosts to be added and removed
-    Set<String> excludedHosts = getHostList(actionExecutionContext.getParameters(),
-                                            DECOM_EXCLUDED_HOSTS);
-    Set<String> includedHosts = getHostList(actionExecutionContext.getParameters(),
-                                            DECOM_INCLUDED_HOSTS);
-    String slaveCompType = actionExecutionContext.getParameters().get(DECOM_SLAVE_COMPONENT);
+    Set<String> excludedHosts = getHostList(request.getParameters(), DECOM_EXCLUDED_HOSTS);
+    Set<String> includedHosts = getHostList(request.getParameters(), DECOM_INCLUDED_HOSTS);
+    String slaveCompType = request.getParameters().get(DECOM_SLAVE_COMPONENT);
 
     Set<String> cloneSet = new HashSet<String>(excludedHosts);
     cloneSet.retainAll(includedHosts);
     if (cloneSet.size() > 0) {
-      throw new AmbariException("Same host cannot be specified for inclusion " +
-        "as well as exclusion. Hosts: " + cloneSet.toString());
+      throw new AmbariException("Same host cannot be specified for inclusion as well as exclusion. Hosts: "
+          + cloneSet.toString());
     }
 
     Service service = cluster.getService(serviceName);
     if (service == null) {
-      throw new AmbariException("Specified service " + serviceName +
-        " is not a valid/deployed service.");
+      throw new AmbariException("Specified service " + serviceName + " is not a valid/deployed service.");
     }
 
-    String masterCompType = componentName;
+    String masterCompType = request.getComponentName();
     Map<String, ServiceComponent> svcComponents = service.getServiceComponents();
     if (!svcComponents.containsKey(masterCompType)) {
-      throw new AmbariException("Specified component " + masterCompType +
-        " does not belong to service " + serviceName + ".");
+      throw new AmbariException("Specified component " + masterCompType + " does not belong to service "
+          + serviceName + ".");
     }
 
     ServiceComponent masterComponent = svcComponents.get(masterCompType);
     if (!masterComponent.isMasterComponent()) {
-      throw new AmbariException("Specified component " + masterCompType +
-        " is not a MASTER for service " + serviceName + ".");
+      throw new AmbariException("Specified component " + masterCompType + " is not a MASTER for service "
+          + serviceName + ".");
     }
 
     if (!masterToSlaveMappingForDecom.containsKey(masterCompType)) {
@@ -534,7 +542,7 @@ public class AmbariCustomCommandExecutionHelper {
       throw new AmbariException("Component " + slaveCompType + " is not supported for decommissioning.");
     }
 
-    String isDrainOnlyRequest = actionExecutionContext.getParameters().get(HBASE_MARK_DRAINING_ONLY);
+    String isDrainOnlyRequest = request.getParameters().get(HBASE_MARK_DRAINING_ONLY);
     if (isDrainOnlyRequest != null && !slaveCompType.equals(Role.HBASE_REGIONSERVER.name())) {
       throw new AmbariException(HBASE_MARK_DRAINING_ONLY + " is not a valid parameter for " + masterCompType);
     }
@@ -549,7 +557,7 @@ public class AmbariCustomCommandExecutionHelper {
       }
     }
 
-    String alignMtnStateStr = actionExecutionContext.getParameters().get(ALIGN_MAINTENANCE_STATE);
+    String alignMtnStateStr = request.getParameters().get(ALIGN_MAINTENANCE_STATE);
     boolean alignMtnState = "true".equals(alignMtnStateStr);
     // Set/reset decommissioned flag on all components
     List<String> listOfExcludedHosts = new ArrayList<String>();
@@ -589,18 +597,12 @@ public class AmbariCustomCommandExecutionHelper {
       }
     }
 
-    StringBuilder commandDetail = getReadableDecommissionCommandDetail
-      (actionExecutionContext, includedHosts, listOfExcludedHosts);
+    StringBuilder commandDetail = getReadableDecommissionCommandDetail(request, includedHosts, listOfExcludedHosts);
 
     for (String hostName : masterSchs.keySet()) {
-      RequestResourceFilter commandFilter = new RequestResourceFilter(serviceName,
-        masterComponent.getName(), Collections.singletonList(hostName));
-      List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
-      resourceFilters.add(commandFilter);
-
-      ActionExecutionContext commandContext = new ActionExecutionContext(
-        clusterName, actionExecutionContext.getActionName(), resourceFilters
-      );
+      ExecuteActionRequest commandRequest = new ExecuteActionRequest(
+          request.getClusterName(), request.getCommandName(), request.getActionName(), request.getServiceName(),
+          masterComponent.getName(), Collections.singletonList(hostName), null);
 
       String clusterHostInfoJson = StageUtils.getGson().toJson(
           StageUtils.getClusterHostInfo(clusters.getHostsForCluster(cluster.getClusterName()), cluster));
@@ -620,17 +622,16 @@ public class AmbariCustomCommandExecutionHelper {
 
       if (!serviceName.equals(Service.Type.HBASE.name()) || hostName.equals(primaryCandidate)) {
         commandParams.put(UPDATE_EXCLUDE_FILE_ONLY, "false");
-        addCustomCommandAction(commandContext, commandFilter, stage,
-          hostLevelParams, commandParams, commandDetail.toString());
+        addCustomCommandAction(commandRequest, stage, hostLevelParams, commandParams, commandDetail.toString());
       }
     }
   }
 
-  private StringBuilder getReadableDecommissionCommandDetail(
-      ActionExecutionContext actionExecutionContext, Set<String> includedHosts,
-      List<String> listOfExcludedHosts) {
+  private StringBuilder getReadableDecommissionCommandDetail(ExecuteActionRequest request,
+                                                             Set<String> includedHosts,
+                                                             List<String> listOfExcludedHosts) {
     StringBuilder commandDetail = new StringBuilder();
-    commandDetail.append(actionExecutionContext.getActionName());
+    commandDetail.append(request.getCommandName());
     if (listOfExcludedHosts.size() > 0) {
       commandDetail.append(", Excluded: ").append(StringUtils.join(listOfExcludedHosts, ','));
     }
@@ -641,83 +642,156 @@ public class AmbariCustomCommandExecutionHelper {
   }
 
   /**
-   * Validate custom command and throw exception is invalid request.
-   * @param actionRequest
-   * @throws AmbariException
+   * Creates and populates an EXECUTION_COMMAND for host
    */
-  public void validateAction(ExecuteActionRequest actionRequest) throws AmbariException {
+  public void createHostAction(Cluster cluster,
+                               Stage stage, ServiceComponentHost scHost,
+                               Map<String, Map<String, String>> configurations,
+                               Map<String, Map<String, String>> configTags,
+                               RoleCommand roleCommand,
+                               Map<String, String> commandParams,
+                               ServiceComponentHostEvent event)
+      throws AmbariException {
+
+    stage.addHostRoleExecutionCommand(scHost.getHostName(), Role.valueOf(scHost
+        .getServiceComponentName()), roleCommand,
+        event, scHost.getClusterName(),
+        scHost.getServiceName());
+    String serviceName = scHost.getServiceName();
+    String componentName = event.getServiceComponentName();
+    String hostname = scHost.getHostName();
+    String osType = clusters.getHost(hostname).getOsType();
+    StackId stackId = cluster.getDesiredStackVersion();
+    ServiceInfo serviceInfo = ambariMetaInfo.getServiceInfo(stackId.getStackName(),
+        stackId.getStackVersion(), serviceName);
+    ComponentInfo componentInfo = ambariMetaInfo.getComponent(
+        stackId.getStackName(), stackId.getStackVersion(),
+        serviceName, componentName);
+    StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+        stackId.getStackVersion());
+
+    ExecutionCommand execCmd = stage.getExecutionCommandWrapper(scHost.getHostName(),
+        scHost.getServiceComponentName()).getExecutionCommand();
 
-    List<RequestResourceFilter> resourceFilters = actionRequest.getResourceFilters();
+    Host host = clusters.getHost(scHost.getHostName());
 
-    if (resourceFilters == null || resourceFilters.isEmpty()) {
-      throw new AmbariException("Command execution cannot proceed without a " +
-        "resource filter.");
+    // Hack - Remove passwords from configs
+    if (event.getServiceComponentName().equals(Role.HIVE_CLIENT.toString())) {
+      configHelper.applyCustomConfig(configurations, Configuration.HIVE_CONFIG_TAG,
+          Configuration.HIVE_METASTORE_PASSWORD_PROPERTY, "", true);
     }
 
-    for (RequestResourceFilter resourceFilter : resourceFilters) {
-      if (resourceFilter.getServiceName() == null
-        || resourceFilter.getServiceName().isEmpty()
-        || actionRequest.getCommandName() == null
-        || actionRequest.getCommandName().isEmpty()) {
-        throw new AmbariException("Invalid resource filter : " + "cluster = "
-          + actionRequest.getClusterName() + ", service = "
-          + resourceFilter.getServiceName() + ", command = "
-          + actionRequest.getCommandName());
+    String jobtrackerHost = amc.getJobTrackerHost(cluster);
+    if (!scHost.getHostName().equals(jobtrackerHost)) {
+      if (configTags.get(Configuration.GLOBAL_CONFIG_TAG) != null) {
+        configHelper.applyCustomConfig(
+            configurations, Configuration.GLOBAL_CONFIG_TAG,
+            Configuration.RCA_ENABLED_PROPERTY, "false", false);
       }
+    }
 
-      if (!isServiceCheckCommand(actionRequest.getCommandName(), resourceFilter.getServiceName())
-        && !isValidCustomCommand(actionRequest, resourceFilter)) {
-        throw new AmbariException(
-          "Unsupported action " + actionRequest.getCommandName() +
-            " for Service: " + resourceFilter.getServiceName()
-            + " and Component: " + resourceFilter.getComponentName());
-      }
+    execCmd.setConfigurations(configurations);
+    execCmd.setConfigurationTags(configTags);
+    if (commandParams == null) { // if not defined
+      commandParams = new TreeMap<String, String>();
     }
-  }
+    commandParams.put(SCHEMA_VERSION, serviceInfo.getSchemaVersion());
 
-  /**
-   * Other than Service_Check and Decommission all other commands are pass-through
-   * @param actionExecutionContext received request to execute a command
-   * @param stage the initial stage for task creation
-   * @param hostLevelParams specific parameters for the hosts
-   * @throws AmbariException
-   */
-  public void addExecutionCommandsToStage(ActionExecutionContext actionExecutionContext,
-                                          Stage stage, Map<String, String> hostLevelParams)
-                                          throws AmbariException {
-
-    List<RequestResourceFilter> resourceFilters = actionExecutionContext.getResourceFilters();
-
-    for (RequestResourceFilter resourceFilter : resourceFilters) {
-      LOG.debug("Received a command execution request"
-        + ", clusterName=" + actionExecutionContext.getClusterName()
-        + ", serviceName=" + resourceFilter.getServiceName()
-        + ", request=" + actionExecutionContext.toString());
-
-      if (actionExecutionContext.getActionName().contains(SERVICE_CHECK_COMMAND_NAME)) {
-        findHostAndAddServiceCheckAction(actionExecutionContext,
-          resourceFilter, stage, hostLevelParams);
-      } else if (actionExecutionContext.getActionName().equals(DECOMMISSION_COMMAND_NAME)) {
-        addDecommissionAction(actionExecutionContext, resourceFilter, stage, hostLevelParams);
-      } else if (isValidCustomCommand(actionExecutionContext, resourceFilter)) {
-        String commandDetail = getReadableCustomCommandDetail(actionExecutionContext, resourceFilter);
-        addCustomCommandAction(actionExecutionContext, resourceFilter, stage,
-          hostLevelParams, null, commandDetail);
+
+    // Get command script info for custom command/custom action
+    /*
+     * TODO: Custom actions are not supported yet, that's why we just pass
+     * component main commandScript to agent. This script is only used for
+     * default commands like INSTALL/STOP/START/CONFIGURE
+     */
+    String commandTimeout = configs.getDefaultAgentTaskTimeout();
+    CommandScriptDefinition script = componentInfo.getCommandScript();
+    if (serviceInfo.getSchemaVersion().equals(AmbariMetaInfo.SCHEMA_VERSION_2)) {
+      if (script != null) {
+        commandParams.put(SCRIPT, script.getScript());
+        commandParams.put(SCRIPT_TYPE, script.getScriptType().toString());
+        if (script.getTimeout() > 0) {
+          commandTimeout = String.valueOf(script.getTimeout());
+        }
       } else {
-        throw new AmbariException("Unsupported action " +
-          actionExecutionContext.getActionName());
+        String message = String.format("Component %s of service %s has no " +
+            "command script defined", componentName, serviceName);
+        throw new AmbariException(message);
       }
     }
+    commandParams.put(COMMAND_TIMEOUT, commandTimeout);
+    commandParams.put(SERVICE_PACKAGE_FOLDER,
+        serviceInfo.getServicePackageFolder());
+    commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
+
+    execCmd.setCommandParams(commandParams);
+
+    String repoInfo = getRepoInfo(cluster, host);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Sending repo information to agent"
+          + ", hostname=" + scHost.getHostName()
+          + ", clusterName=" + cluster.getClusterName()
+          + ", stackInfo=" + stackId.getStackId()
+          + ", repoInfo=" + repoInfo);
+    }
+
+    Map<String, String> hostParams = new TreeMap<String, String>();
+    hostParams.put(REPO_INFO, repoInfo);
+    hostParams.put(JDK_LOCATION, amc.getJdkResourceUrl());
+    hostParams.put(JAVA_HOME, amc.getJavaHome());
+    hostParams.put(JDK_NAME, amc.getJDKName());
+    hostParams.put(JCE_NAME, amc.getJCEName());
+    hostParams.put(STACK_NAME, stackId.getStackName());
+    hostParams.put(STACK_VERSION, stackId.getStackVersion());
+    hostParams.put(DB_NAME, amc.getServerDB());
+    hostParams.put(MYSQL_JDBC_URL, amc.getMysqljdbcUrl());
+    hostParams.put(ORACLE_JDBC_URL, amc.getOjdbcUrl());
+    hostParams.putAll(amc.getRcaParameters());
+
+    // Write down os specific info for the service
+    ServiceOsSpecific anyOs = null;
+    if (serviceInfo.getOsSpecifics().containsKey(AmbariMetaInfo.ANY_OS)) {
+      anyOs = serviceInfo.getOsSpecifics().get(AmbariMetaInfo.ANY_OS);
+    }
+    ServiceOsSpecific hostOs = null;
+    if (serviceInfo.getOsSpecifics().containsKey(osType)) {
+      hostOs = serviceInfo.getOsSpecifics().get(osType);
+      // Choose repo that is relevant for host
+      ServiceOsSpecific.Repo serviceRepo = hostOs.getRepo();
+      if (serviceRepo != null) {
+        String serviceRepoInfo = gson.toJson(serviceRepo);
+        hostParams.put(SERVICE_REPO_INFO, serviceRepoInfo);
+      }
+    }
+    // Build package list that is relevant for host
+    List<ServiceOsSpecific.Package> packages =
+        new ArrayList<ServiceOsSpecific.Package>();
+    if (anyOs != null) {
+      packages.addAll(anyOs.getPackages());
+    }
+
+    if (hostOs != null) {
+      packages.addAll(hostOs.getPackages());
+    }
+    String packageList = gson.toJson(packages);
+    hostParams.put(PACKAGE_LIST, packageList);
+
+    if (configs.getServerDBName().equalsIgnoreCase(Configuration
+        .ORACLE_DB_NAME)) {
+      hostParams.put(DB_DRIVER_FILENAME, configs.getOjdbcJarName());
+    } else if (configs.getServerDBName().equalsIgnoreCase(Configuration
+        .MYSQL_DB_NAME)) {
+      hostParams.put(DB_DRIVER_FILENAME, configs.getMySQLJarName());
+    }
+    execCmd.setHostLevelParams(hostParams);
+
+    Map<String, String> roleParams = new TreeMap<String, String>();
+    execCmd.setRoleParams(roleParams);
+    
+    execCmd.setPassiveInfo(MaintenanceStateHelper.getMaintenanceHostCompoments(clusters, cluster));
   }
 
-  /**
-   * Get repository info given a cluster and host.
-   * @param cluster
-   * @param host
-   * @return
-   * @throws AmbariException
-   */
-  public String getRepoInfo(Cluster cluster, Host host) throws AmbariException {
+  private String getRepoInfo(Cluster cluster, Host host) throws AmbariException {
     StackId stackId = cluster.getDesiredStackVersion();
 
     Map<String, List<RepositoryInfo>> repos = ambariMetaInfo.getRepository(

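Editor's note for readers tracing the hunk above: the removed addExecutionCommandsToStage iterated the request's resource filters and routed each one to a service check, a decommission, or a generic custom command, while the restored createHostAction simply populates one EXECUTION_COMMAND per host. The stand-alone sketch below mirrors that removed dispatch shape with simplified, hypothetical types; the Filter holder and the three handler methods are placeholders, not the real Ambari helpers named in the hunk.

    import java.util.Arrays;
    import java.util.List;

    public class CommandDispatchSketch {

      // Simplified stand-in for the RequestResourceFilter removed by this revert.
      static class Filter {
        final String serviceName;
        final String componentName;
        Filter(String serviceName, String componentName) {
          this.serviceName = serviceName;
          this.componentName = componentName;
        }
      }

      // Hypothetical placeholders for findHostAndAddServiceCheckAction,
      // addDecommissionAction and addCustomCommandAction from the hunk above.
      static void serviceCheck(Filter f) { System.out.println("service check: " + f.serviceName); }
      static void decommission(Filter f) { System.out.println("decommission: " + f.componentName); }
      static void customCommand(Filter f, String name) { System.out.println(name + ": " + f.componentName); }

      static void dispatch(String actionName, List<Filter> filters) {
        for (Filter filter : filters) {
          if (actionName.contains("SERVICE_CHECK")) {
            serviceCheck(filter);
          } else if (actionName.equals("DECOMMISSION")) {
            decommission(filter);
          } else {
            customCommand(filter, actionName); // e.g. RESTART
          }
        }
      }

      public static void main(String[] args) {
        dispatch("DECOMMISSION", Arrays.asList(new Filter("HBASE", "HBASE_MASTER")));
      }
    }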

[2/3] Revert "AMBARI-4791. API call to restart all components on one or more hosts should result in one request. (swagle)"

Posted by sw...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 60688af..7e7f381 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -18,11 +18,22 @@
 
 package org.apache.ambari.server.controller;
 
-import com.google.gson.Gson;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Singleton;
-import com.google.inject.persist.Transactional;
+import java.io.File;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.TreeMap;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ClusterNotFoundException;
 import org.apache.ambari.server.DuplicateResourceException;
@@ -41,11 +52,9 @@ import org.apache.ambari.server.actionmanager.Request;
 import org.apache.ambari.server.actionmanager.RequestFactory;
 import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.actionmanager.StageFactory;
-import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.internal.URLStreamProvider;
-import org.apache.ambari.server.customactions.ActionDefinition;
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.metadata.RoleCommandOrder;
 import org.apache.ambari.server.scheduler.ExecutionScheduleManager;
@@ -55,15 +64,14 @@ import org.apache.ambari.server.security.authorization.Users;
 import org.apache.ambari.server.stageplanner.RoleGraph;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
-import org.apache.ambari.server.state.CommandScriptDefinition;
 import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.HostState;
-import org.apache.ambari.server.state.MaintenanceState;
 import org.apache.ambari.server.state.OperatingSystemInfo;
+import org.apache.ambari.server.state.MaintenanceState;
 import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.Service;
@@ -74,15 +82,14 @@ import org.apache.ambari.server.state.ServiceComponentHostEvent;
 import org.apache.ambari.server.state.ServiceComponentHostFactory;
 import org.apache.ambari.server.state.ServiceFactory;
 import org.apache.ambari.server.state.ServiceInfo;
-import org.apache.ambari.server.state.ServiceOsSpecific;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
 import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostDisableEvent;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent;
+import org.apache.ambari.server.state.svccomphost.ServiceComponentHostDisableEvent;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostRestoreEvent;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartEvent;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStopEvent;
@@ -94,48 +101,17 @@ import org.apache.http.client.utils.URIBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.File;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.TreeMap;
+import com.google.gson.Gson;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Singleton;
+import com.google.inject.persist.Transactional;
 
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_DRIVER;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_PASSWORD;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_URL;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_USERNAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_NAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_HOME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JCE_NAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_NAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.MYSQL_JDBC_URL;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.ORACLE_JDBC_URL;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.PACKAGE_LIST;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCHEMA_VERSION;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_REPO_INFO;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.*;
 
 @Singleton
-public class AmbariManagementControllerImpl implements AmbariManagementController {
+public class AmbariManagementControllerImpl implements
+    AmbariManagementController {
 
   private final static Logger LOG =
       LoggerFactory.getLogger(AmbariManagementControllerImpl.class);
@@ -209,8 +185,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
 
   @Inject
   private AmbariCustomCommandExecutionHelper customCommandExecutionHelper;
-  @Inject
-  private AmbariActionExecutionHelper actionExecutionHelper;
+  final private AmbariActionExecutionHelper actionExecutionHelper;
 
   @Inject
   public AmbariManagementControllerImpl(ActionManager actionManager,
@@ -252,6 +227,9 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       this.mysqljdbcUrl = null;
       this.serverDB = null;
     }
+
+    this.actionExecutionHelper = new AmbariActionExecutionHelper(
+        this.actionMetadata, this.clusters, this);
   }
   
   public String getAmbariServerURI(String path) {
@@ -1122,180 +1100,6 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     return executionScheduleManager;
   }
 
-  /**
-   * Creates and populates an EXECUTION_COMMAND for host
-   */
-  private void createHostAction(Cluster cluster,
-                                Stage stage, ServiceComponentHost scHost,
-                                Map<String, Map<String, String>> configurations,
-                                Map<String, Map<String, String>> configTags,
-                                RoleCommand roleCommand,
-                                Map<String, String> commandParams,
-                                ServiceComponentHostEvent event)
-    throws AmbariException {
-
-    stage.addHostRoleExecutionCommand(scHost.getHostName(), Role.valueOf(scHost
-      .getServiceComponentName()), roleCommand,
-      event, scHost.getClusterName(),
-      scHost.getServiceName());
-    String serviceName = scHost.getServiceName();
-    String componentName = event.getServiceComponentName();
-    String hostname = scHost.getHostName();
-    String osType = clusters.getHost(hostname).getOsType();
-    StackId stackId = cluster.getDesiredStackVersion();
-    ServiceInfo serviceInfo = ambariMetaInfo.getServiceInfo(stackId.getStackName(),
-      stackId.getStackVersion(), serviceName);
-    ComponentInfo componentInfo = ambariMetaInfo.getComponent(
-      stackId.getStackName(), stackId.getStackVersion(),
-      serviceName, componentName);
-    StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
-      stackId.getStackVersion());
-
-    ExecutionCommand execCmd = stage.getExecutionCommandWrapper(scHost.getHostName(),
-      scHost.getServiceComponentName()).getExecutionCommand();
-
-    Host host = clusters.getHost(scHost.getHostName());
-
-    // Hack - Remove passwords from configs
-    if (event.getServiceComponentName().equals(Role.HIVE_CLIENT.toString())) {
-      configHelper.applyCustomConfig(configurations, Configuration.HIVE_CONFIG_TAG,
-        Configuration.HIVE_METASTORE_PASSWORD_PROPERTY, "", true);
-    }
-
-    String jobtrackerHost = getJobTrackerHost(cluster);
-    if (!scHost.getHostName().equals(jobtrackerHost)) {
-      if (configTags.get(Configuration.GLOBAL_CONFIG_TAG) != null) {
-        configHelper.applyCustomConfig(
-          configurations, Configuration.GLOBAL_CONFIG_TAG,
-          Configuration.RCA_ENABLED_PROPERTY, "false", false);
-      }
-    }
-
-    execCmd.setConfigurations(configurations);
-    execCmd.setConfigurationTags(configTags);
-    if (commandParams == null) { // if not defined
-      commandParams = new TreeMap<String, String>();
-    }
-    commandParams.put(SCHEMA_VERSION, serviceInfo.getSchemaVersion());
-
-
-    // Get command script info for custom command/custom action
-    /*
-     * TODO: Custom actions are not supported yet, that's why we just pass
-     * component main commandScript to agent. This script is only used for
-     * default commands like INSTALL/STOP/START/CONFIGURE
-     */
-    String commandTimeout = configs.getDefaultAgentTaskTimeout();
-    CommandScriptDefinition script = componentInfo.getCommandScript();
-    if (serviceInfo.getSchemaVersion().equals(AmbariMetaInfo.SCHEMA_VERSION_2)) {
-      if (script != null) {
-        commandParams.put(SCRIPT, script.getScript());
-        commandParams.put(SCRIPT_TYPE, script.getScriptType().toString());
-        if (script.getTimeout() > 0) {
-          commandTimeout = String.valueOf(script.getTimeout());
-        }
-      } else {
-        String message = String.format("Component %s of service %s has no " +
-          "command script defined", componentName, serviceName);
-        throw new AmbariException(message);
-      }
-    }
-    commandParams.put(COMMAND_TIMEOUT, commandTimeout);
-    commandParams.put(SERVICE_PACKAGE_FOLDER,
-      serviceInfo.getServicePackageFolder());
-    commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
-
-    execCmd.setCommandParams(commandParams);
-
-    String repoInfo = customCommandExecutionHelper.getRepoInfo(cluster, host);
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Sending repo information to agent"
-        + ", hostname=" + scHost.getHostName()
-        + ", clusterName=" + cluster.getClusterName()
-        + ", stackInfo=" + stackId.getStackId()
-        + ", repoInfo=" + repoInfo);
-    }
-
-    Map<String, String> hostParams = new TreeMap<String, String>();
-    hostParams.put(REPO_INFO, repoInfo);
-    hostParams.put(JDK_LOCATION, getJdkResourceUrl());
-    hostParams.put(JAVA_HOME, getJavaHome());
-    hostParams.put(JDK_NAME, getJDKName());
-    hostParams.put(JCE_NAME, getJCEName());
-    hostParams.put(STACK_NAME, stackId.getStackName());
-    hostParams.put(STACK_VERSION, stackId.getStackVersion());
-    hostParams.put(DB_NAME, getServerDB());
-    hostParams.put(MYSQL_JDBC_URL, getMysqljdbcUrl());
-    hostParams.put(ORACLE_JDBC_URL, getOjdbcUrl());
-    hostParams.putAll(getRcaParameters());
-
-    // Write down os specific info for the service
-    ServiceOsSpecific anyOs = null;
-    if (serviceInfo.getOsSpecifics().containsKey(AmbariMetaInfo.ANY_OS)) {
-      anyOs = serviceInfo.getOsSpecifics().get(AmbariMetaInfo.ANY_OS);
-    }
-    ServiceOsSpecific hostOs = null;
-    if (serviceInfo.getOsSpecifics().containsKey(osType)) {
-      hostOs = serviceInfo.getOsSpecifics().get(osType);
-      // Choose repo that is relevant for host
-      ServiceOsSpecific.Repo serviceRepo = hostOs.getRepo();
-      if (serviceRepo != null) {
-        String serviceRepoInfo = gson.toJson(serviceRepo);
-        hostParams.put(SERVICE_REPO_INFO, serviceRepoInfo);
-      }
-    }
-    // Build package list that is relevant for host
-    List<ServiceOsSpecific.Package> packages =
-      new ArrayList<ServiceOsSpecific.Package>();
-    if (anyOs != null) {
-      packages.addAll(anyOs.getPackages());
-    }
-
-    if (hostOs != null) {
-      packages.addAll(hostOs.getPackages());
-    }
-    String packageList = gson.toJson(packages);
-    hostParams.put(PACKAGE_LIST, packageList);
-
-    if (configs.getServerDBName().equalsIgnoreCase(Configuration
-      .ORACLE_DB_NAME)) {
-      hostParams.put(DB_DRIVER_FILENAME, configs.getOjdbcJarName());
-    } else if (configs.getServerDBName().equalsIgnoreCase(Configuration
-      .MYSQL_DB_NAME)) {
-      hostParams.put(DB_DRIVER_FILENAME, configs.getMySQLJarName());
-    }
-    execCmd.setHostLevelParams(hostParams);
-
-    Map<String, String> roleParams = new TreeMap<String, String>();
-    execCmd.setRoleParams(roleParams);
-
-    execCmd.setPassiveInfo(MaintenanceStateHelper.getMaintenanceHostCompoments(clusters, cluster));
-  }
-
-  private ActionExecutionContext getActionExecutionContext
-      (ExecuteActionRequest actionRequest) throws AmbariException {
-
-    if (actionRequest.isCommand()) {
-      return new ActionExecutionContext(actionRequest.getClusterName(),
-        actionRequest.getCommandName(), actionRequest.getResourceFilters(),
-        actionRequest.getParameters());
-    } else {
-
-    ActionDefinition actionDef = ambariMetaInfo.getActionDefinition(actionRequest.getActionName());
-
-    if (actionDef == null) {
-      throw new AmbariException("Action " + actionRequest.getActionName() + " does not exist");
-    }
-
-    return new ActionExecutionContext(actionRequest.getClusterName(),
-      actionRequest.getActionName(), actionRequest.getResourceFilters(),
-      actionRequest.getParameters(), actionDef.getTargetType(),
-      actionDef.getDefaultTimeout(), actionDef.getTargetService(),
-      actionDef.getTargetComponent());
-
-    }
-  }
-
   private List<Stage> doStageCreation(Cluster cluster,
       Map<State, List<Service>> changedServices,
       Map<State, List<ServiceComponent>> changedComps,
@@ -1490,8 +1294,9 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
               }
             }
 
-            createHostAction(cluster, stage, scHost, configurations, configTags,
-              roleCommand, requestParameters, event);
+            customCommandExecutionHelper.createHostAction(cluster, stage, scHost,
+                    configurations, configTags,
+                    roleCommand, requestParameters, event);
           }
         }
       }
@@ -1514,8 +1319,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
         }
 
         customCommandExecutionHelper.addServiceCheckAction(stage, clientHost,
-          smokeTestRole, nowTimestamp, serviceName,
-          null, null, createDefaultHostParams(cluster));
+            smokeTestRole, nowTimestamp, serviceName,
+            null, null, createDefaultHostParams(cluster));
       }
 
       RoleCommandOrder rco = getRoleCommandOrder(cluster);
@@ -2313,15 +2118,11 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
 
     Cluster cluster = clusters.getCluster(clusterName);
 
-    LOG.info("Received action execution request"
-      + ", clusterName=" + actionRequest.getClusterName()
-      + ", request=" + actionRequest.toString());
-
-    ActionExecutionContext actionExecContext = getActionExecutionContext(actionRequest);
+    ActionExecutionContext actionExecContext = null;
     if (actionRequest.isCommand()) {
-      customCommandExecutionHelper.validateAction(actionRequest);
+      customCommandExecutionHelper.validateCustomCommand(actionRequest);
     } else {
-      actionExecutionHelper.validateAction(actionRequest);
+      actionExecContext = actionExecutionHelper.validateCustomAction(actionRequest, cluster);
     }
 
     Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(
@@ -2333,9 +2134,9 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     Map<String, String> params = createDefaultHostParams(cluster);
 
     if (actionRequest.isCommand()) {
-      customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, params);
+      customCommandExecutionHelper.addAction(actionRequest, stage, params);
     } else {
-      actionExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, params);
+      actionExecutionHelper.addAction(actionExecContext, stage, configs, hostsMap, params);
     }
 
     RoleCommandOrder rco = this.getRoleCommandOrder(cluster);
@@ -2512,6 +2313,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
       }
     }
   }
+  
 
   @Override
   public Set<StackVersionResponse> getStackVersions(

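Aside on the createAction changes above: the restored flow branches on ExecuteActionRequest.isCommand(), sending command requests (no action name) through the custom-command helper and action requests through the action-execution helper. A minimal, self-contained illustration of that predicate; everything except the isCommand() logic itself is a hypothetical stand-in:

    public class IsCommandSketch {

      // Mirrors the isCommand() check in ExecuteActionRequest further down in this revert:
      // a request is treated as a command when no action name is supplied.
      static boolean isCommand(String actionName) {
        return actionName == null || actionName.isEmpty();
      }

      public static void main(String[] args) {
        System.out.println(isCommand(null));          // true  -> custom command path
        System.out.println(isCommand("some_action")); // false -> custom action path
      }
    }
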
http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index 2361af7..b9f62e4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -131,7 +131,6 @@ public class AmbariServer {
     return clusterController;
   }
 
-  @SuppressWarnings("deprecation")
   public void run() throws Exception {
     // Initialize meta info before heartbeat monitor
     ambariMetaInfo.init();

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index adb78c3..ae57af2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -37,6 +37,7 @@ import org.apache.ambari.server.controller.internal.HostComponentResourceProvide
 import org.apache.ambari.server.controller.internal.HostResourceProvider;
 import org.apache.ambari.server.controller.internal.ServiceResourceProvider;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
+import org.apache.ambari.server.orm.DBAccessorImpl;
 import org.apache.ambari.server.orm.PersistenceType;
 import org.apache.ambari.server.scheduler.ExecutionScheduler;
 import org.apache.ambari.server.scheduler.ExecutionSchedulerImpl;
@@ -70,13 +71,12 @@ import org.apache.ambari.server.state.scheduler.RequestExecutionImpl;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostImpl;
 import org.springframework.security.crypto.password.PasswordEncoder;
 import org.springframework.security.crypto.password.StandardPasswordEncoder;
-
 import java.security.SecureRandom;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
-
 import com.google.gson.GsonBuilder;
+import org.apache.ambari.server.orm.DBAccessor;
 import static org.eclipse.persistence.config.PersistenceUnitProperties.CREATE_JDBC_DDL_FILE;
 import static org.eclipse.persistence.config.PersistenceUnitProperties.CREATE_ONLY;
 import static org.eclipse.persistence.config.PersistenceUnitProperties.CREATE_OR_EXTEND;
@@ -247,6 +247,7 @@ public class ControllerModule extends AbstractModule {
     install(new FactoryModuleBuilder().build(RequestFactory.class));
 
     bind(HostRoleCommandFactory.class).to(HostRoleCommandFactoryImpl.class);
+    bind(DBAccessor.class).to(DBAccessorImpl.class);
   }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
index 3b5afcd..f8dd908 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.ambari.server.controller;
 
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
+import org.apache.ambari.server.utils.StageUtils;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -28,36 +28,38 @@ import java.util.Map;
  * Helper class to capture details used to create action or custom commands
  */
 public class ExecuteActionRequest {
-  private final String clusterName;
-  private final String commandName;
-  private final List<RequestResourceFilter> resourceFilters;
+  private String clusterName;
+  private String commandName;
   private String actionName;
+  private String serviceName;
+  private String componentName;
+  private List<String> hosts;
   private Map<String, String> parameters;
 
   public ExecuteActionRequest(String clusterName, String commandName,
-                              String actionName,
-                              List<RequestResourceFilter> resourceFilters,
-                              Map<String, String> parameters) {
-    this(clusterName, commandName, parameters);
+                              String actionName, String serviceName, String componentName,
+                              List<String> hosts, Map<String, String> parameters) {
+    this(clusterName, commandName, serviceName, parameters);
     this.actionName = actionName;
-    if (resourceFilters != null) {
-      this.resourceFilters.addAll(resourceFilters);
+    this.componentName = componentName;
+    if (hosts != null) {
+      this.hosts.addAll(hosts);
     }
   }
 
   /**
-   * Create an ExecuteActionRequest to execute a command.
-   * No filters.
+   * Create an ExecuteActionRequest to execute a command
    */
-  public ExecuteActionRequest(String clusterName, String commandName, Map<String, String> parameters) {
+  public ExecuteActionRequest(String clusterName, String commandName, String serviceName,
+                              Map<String, String> parameters) {
     this.clusterName = clusterName;
     this.commandName = commandName;
-    this.actionName = null;
+    this.serviceName = serviceName;
     this.parameters = new HashMap<String, String>();
     if (parameters != null) {
       this.parameters.putAll(parameters);
     }
-    this.resourceFilters = new ArrayList<RequestResourceFilter>();
+    this.hosts = new ArrayList<String>();
   }
 
   public String getClusterName() {
@@ -72,14 +74,22 @@ public class ExecuteActionRequest {
     return actionName;
   }
 
-  public List<RequestResourceFilter> getResourceFilters() {
-    return resourceFilters;
+  public String getServiceName() {
+    return serviceName;
+  }
+
+  public String getComponentName() {
+    return componentName;
   }
 
   public Map<String, String> getParameters() {
     return parameters;
   }
 
+  public List<String> getHosts() {
+    return hosts;
+  }
+
   public Boolean isCommand() {
     return actionName == null || actionName.isEmpty();
   }
@@ -91,7 +101,9 @@ public class ExecuteActionRequest {
         append(", action :" + actionName).
         append(", command :" + commandName).
         append(", inputs :" + parameters.toString()).
-        append(", resourceFilters: " + resourceFilters).
+        append(", targetService :" + serviceName).
+        append(", targetComponent :" + componentName).
+        append(", targetHosts :" + hosts.toString()).
         append(", clusterName :" + clusterName).toString();
   }
 }
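
Usage note for the reverted ExecuteActionRequest above: callers now pass the target service, component and host list directly instead of a list of resource filters. A minimal sketch of building a DECOMMISSION command request against the restored constructor (it assumes the Ambari classes from this diff are on the classpath; the cluster, host and parameter values are illustrative):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.ambari.server.controller.ExecuteActionRequest;

    public class ExecuteActionRequestSketch {
      public static void main(String[] args) {
        Map<String, String> params = new HashMap<String, String>();
        params.put("excluded_hosts", "h2");

        // actionName is null, so isCommand() reports true and the request is
        // handled as a custom command for HBASE/HBASE_MASTER on host h1.
        ExecuteActionRequest request = new ExecuteActionRequest(
            "c1", "DECOMMISSION", null, "HBASE", "HBASE_MASTER",
            Arrays.asList("h1"), params);

        System.out.println(request);
      }
    }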

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java
index b78f68f..5251b3d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java
@@ -17,17 +17,14 @@
  */
 package org.apache.ambari.server.controller;
 
-import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.HostNotFoundException;
 import org.apache.ambari.server.RoleCommand;
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Host;
@@ -35,6 +32,7 @@ import org.apache.ambari.server.state.MaintenanceState;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
+
 import com.google.inject.Inject;
 import com.google.inject.Injector;
 
@@ -145,14 +143,11 @@ public class MaintenanceStateHelper {
     
     // return the first one, just like amc.createStages()
     RequestStatusResponse response = null;
-
-    RequestResourceFilter resourceFilter =
-      new RequestResourceFilter(NAGIOS_SERVICE, NAGIOS_COMPONENT, null);
-
+    
     for (String clusterName : clusterNames) {
       ExecuteActionRequest actionRequest = new ExecuteActionRequest(
-        clusterName, null, NAGIOS_ACTION_NAME,
-        Collections.singletonList(resourceFilter), params);
+          clusterName, RoleCommand.ACTIONEXECUTE.name(),
+          NAGIOS_ACTION_NAME, NAGIOS_SERVICE, NAGIOS_COMPONENT, null, params);
       
       if (null == response)
         response = amc.createAction(actionRequest, requestProperties);

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceFilter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceFilter.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceFilter.java
deleted file mode 100644
index 8a492a2..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceFilter.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.server.controller.internal;
-
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class RequestResourceFilter {
-  private String serviceName;
-  private String componentName;
-  private final List<String> hostNames = new ArrayList<String>();
-
-  public RequestResourceFilter() {
-
-  }
-
-  public RequestResourceFilter(String serviceName, String componentName, List<String> hostNames) {
-    this.serviceName = serviceName;
-    this.componentName = componentName;
-    if (hostNames != null) {
-      this.hostNames.addAll(hostNames);
-    }
-  }
-
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
-  @JsonProperty("service_name")
-  public String getServiceName() {
-    return serviceName;
-  }
-
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
-  @JsonProperty("component_name")
-  public String getComponentName() {
-    return componentName;
-  }
-
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
-  @JsonProperty("hosts")
-  public List<String> getHostNames() {
-    return hostNames;
-  }
-
-  @Override
-  public String toString() {
-    return "RequestResourceFilter{" +
-      "serviceName='" + serviceName + '\'' +
-      ", componentName='" + componentName + '\'' +
-      ", hostNames=" + hostNames +
-      '}';
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
index 5b3ff8a..92d1476 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
@@ -17,14 +17,12 @@
  */
 package org.apache.ambari.server.controller.internal;
 
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
-import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.ExecuteActionRequest;
+import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.controller.RequestStatusResponse;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
@@ -35,9 +33,9 @@ import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.state.Clusters;
 
-import java.lang.reflect.Type;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -64,7 +62,9 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
   public static final String REQUEST_SOURCE_SCHEDULE_HREF = "Requests/request_schedule/href";
   protected static final String REQUEST_TYPE_ID = "Requests/type";
   protected static final String REQUEST_INPUTS_ID = "Requests/inputs";
-  protected static final String REQUEST_RESOURCE_FILTER_ID = "Requests/resources";
+  protected static final String REQUEST_TARGET_SERVICE_ID = "Requests/target_service";
+  protected static final String REQUEST_TARGET_COMPONENT_ID = "Requests/target_component";
+  protected static final String REQUEST_TARGET_HOSTS_ID = "Requests/target_hosts";
   protected static final String REQUEST_CREATE_TIME_ID = "Requests/create_time";
   protected static final String REQUEST_START_TIME_ID = "Requests/start_time";
   protected static final String REQUEST_END_TIME_ID = "Requests/end_time";
@@ -77,11 +77,13 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
   protected static final String REQUEST_PROGRESS_PERCENT_ID = "Requests/progress_percent";
   protected static final String COMMAND_ID = "command";
   protected static final String ACTION_ID = "action";
+  protected static final String HOSTS_ID = "hosts";
+  protected static final String SERVICE_NAME_ID = "service_name";
+  protected static final String COMPONENT_NAME_ID = "component_name";
   protected static final String INPUTS_ID = "parameters";
   private static Set<String> pkPropertyIds =
       new HashSet<String>(Arrays.asList(new String[]{
           REQUEST_ID_PROPERTY_ID}));
-  private Gson gson = new Gson();
 
   // ----- Constructors ----------------------------------------------------
 
@@ -163,7 +165,6 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
   // ----- utility methods --------------------------------------------------
 
   // Get request to execute an action/command
-  @SuppressWarnings("unchecked")
   private ExecuteActionRequest getActionRequest(Request request) {
     Map<String, String> requestInfoProperties = request.getRequestInfoProperties();
     Map<String, Object> propertyMap = request.getProperties().iterator().next();
@@ -183,13 +184,20 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
       actionName = requestInfoProperties.get(ACTION_ID);
     }
 
-    List<RequestResourceFilter> resourceFilters = null;
-    Object resourceFilterObj = propertyMap.get(REQUEST_RESOURCE_FILTER_ID);
-    if (resourceFilterObj != null) {
-      Type filterListType = new TypeToken<List<RequestResourceFilter>>(){}.getType();
-      resourceFilters = gson.fromJson((String) resourceFilterObj, filterListType);
+    String hostList = requestInfoProperties.get(HOSTS_ID);
+    List<String> hosts = new ArrayList<String>();
+    if (hostList != null && !hostList.isEmpty()) {
+      for (String hostname : hostList.split(",")) {
+        String trimmedName = hostname.trim();
+        if (!trimmedName.isEmpty()) {
+          hosts.add(hostname.trim());
+        }
+      }
     }
 
+    String serviceName = requestInfoProperties.get(SERVICE_NAME_ID);
+    String componentName = requestInfoProperties.get(COMPONENT_NAME_ID);
+
     Map<String, String> params = new HashMap<String, String>();
     String keyPrefix = "/" + INPUTS_ID + "/";
     for (String key : requestInfoProperties.keySet()) {
@@ -199,11 +207,13 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
     }
 
     return new ExecuteActionRequest(
-      (String) propertyMap.get(REQUEST_CLUSTER_NAME_PROPERTY_ID),
-      commandName,
-      actionName,
-      resourceFilters,
-      params);
+        (String) propertyMap.get(REQUEST_CLUSTER_NAME_PROPERTY_ID),
+        commandName,
+        actionName,
+        serviceName,
+        componentName,
+        hosts,
+        params);
   }
 
   // Get all of the request resources for the given properties
@@ -276,7 +286,9 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
     setResourceProperty(resource, REQUEST_CONTEXT_ID, request.getRequestContext(), requestedPropertyIds);
     setResourceProperty(resource, REQUEST_TYPE_ID, request.getRequestType(), requestedPropertyIds);
     setResourceProperty(resource, REQUEST_INPUTS_ID, request.getInputs(), requestedPropertyIds);
-    setResourceProperty(resource, REQUEST_RESOURCE_FILTER_ID, request.getResourceFilters(), requestedPropertyIds);
+    setResourceProperty(resource, REQUEST_TARGET_SERVICE_ID, request.getTargetService(), requestedPropertyIds);
+    setResourceProperty(resource, REQUEST_TARGET_COMPONENT_ID, request.getTargetComponent(), requestedPropertyIds);
+    setResourceProperty(resource, REQUEST_TARGET_HOSTS_ID, request.getTargetHosts(), requestedPropertyIds);
     setResourceProperty(resource, REQUEST_CREATE_TIME_ID, request.getCreateTime(), requestedPropertyIds);
     setResourceProperty(resource, REQUEST_START_TIME_ID, request.getStartTime(), requestedPropertyIds);
     setResourceProperty(resource, REQUEST_END_TIME_ID, request.getEndTime(), requestedPropertyIds);
@@ -409,5 +421,4 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
 
     return resource;
   }
-
 }
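
A small note on the restored getActionRequest above: the target hosts arrive as one comma-separated request-info property and are split and trimmed before being handed to ExecuteActionRequest. That parsing is plain Java and can be exercised in isolation; the sketch below reproduces the same split-and-trim behaviour (the input string is illustrative):

    import java.util.ArrayList;
    import java.util.List;

    public class HostListParsingSketch {

      // Same split-and-trim behaviour as the restored provider code above.
      static List<String> parseHosts(String hostList) {
        List<String> hosts = new ArrayList<String>();
        if (hostList != null && !hostList.isEmpty()) {
          for (String hostname : hostList.split(",")) {
            String trimmedName = hostname.trim();
            if (!trimmedName.isEmpty()) {
              hosts.add(trimmedName);
            }
          }
        }
        return hosts;
      }

      public static void main(String[] args) {
        System.out.println(parseHosts(" h1, h2 ,,h3 ")); // prints [h1, h2, h3]
      }
    }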

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
index 17fbe2f..072b4ed 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
@@ -29,6 +29,7 @@ import javax.persistence.EnumType;
 import javax.persistence.Enumerated;
 import javax.persistence.Id;
 import javax.persistence.JoinColumn;
+import javax.persistence.Lob;
 import javax.persistence.ManyToOne;
 import javax.persistence.OneToMany;
 import javax.persistence.Table;
@@ -62,6 +63,18 @@ public class RequestEntity {
   @Basic
   private String inputs;
 
+  @Column(name = "target_service")
+  @Basic
+  private String targetService;
+
+  @Column(name = "target_component")
+  @Basic
+  private String targetComponent;
+
+  @Column(name = "target_hosts")
+  @Lob
+  private String targetHosts;
+
   @Column(name = "request_type")
   @Enumerated(value = EnumType.STRING)
   private RequestType requestType;
@@ -85,9 +98,6 @@ public class RequestEntity {
   @OneToMany(mappedBy = "request")
   private Collection<StageEntity> stages;
 
-  @OneToMany(mappedBy = "requestEntity")
-  private Collection<RequestResourceFilterEntity> resourceFilterEntities;
-
   @ManyToOne(cascade = {CascadeType.MERGE})
   @JoinColumn(name = "cluster_id", referencedColumnName = "cluster_id")
   private ClusterEntity cluster;
@@ -160,6 +170,30 @@ public class RequestEntity {
     this.inputs = inputs;
   }
 
+  public String getTargetService() {
+    return targetService;
+  }
+
+  public void setTargetService(String targetService) {
+    this.targetService = targetService;
+  }
+
+  public String getTargetComponent() {
+    return targetComponent;
+  }
+
+  public void setTargetComponent(String targetComponent) {
+    this.targetComponent = targetComponent;
+  }
+
+  public String getTargetHosts() {
+    return targetHosts;
+  }
+
+  public void setTargetHosts(String targetHosts) {
+    this.targetHosts = targetHosts;
+  }
+
   public RequestType getRequestType() {
     return requestType;
   }
@@ -172,17 +206,8 @@ public class RequestEntity {
     return clusterId;
   }
 
-  public Collection<RequestResourceFilterEntity> getResourceFilterEntities() {
-    return resourceFilterEntities;
-  }
-
-  public void setResourceFilterEntities(Collection<RequestResourceFilterEntity> resourceFilterEntities) {
-    this.resourceFilterEntities = resourceFilterEntities;
-  }
-
   public void setClusterId(Long clusterId) {
     this.clusterId = clusterId;
-
   }
 
   public String getCommandName() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java
deleted file mode 100644
index e03b3b6..0000000
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.server.orm.entities;
-
-import javax.persistence.Basic;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.Lob;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-
-@Entity
-@Table(name = "requestresourcefilter")
-public class RequestResourceFilterEntity {
-
-  @Column(name = "request_id", nullable = false, insertable = true, updatable = true)
-  @Id
-  private Long requestId;
-
-  @Column(name = "service")
-  @Basic
-  @Id
-  private String serviceName;
-
-  @Column(name = "component")
-  @Basic
-  @Id
-  private String componentName;
-
-  @Column(name = "hosts")
-  @Lob
-  private String hosts;
-
-  @ManyToOne
-  @JoinColumn(name = "request_id", referencedColumnName = "request_id", nullable = false, insertable = false, updatable = false)
-  private RequestEntity requestEntity;
-
-  public String getServiceName() {
-    return serviceName;
-  }
-
-  public void setServiceName(String serviceName) {
-    this.serviceName = serviceName;
-  }
-
-  public String getComponentName() {
-    return componentName;
-  }
-
-  public void setComponentName(String componentName) {
-    this.componentName = componentName;
-  }
-
-  public String getHosts() {
-    return hosts;
-  }
-
-  public void setHosts(String hosts) {
-    this.hosts = hosts;
-  }
-
-  public Long getRequestId() {
-    return requestId;
-  }
-
-  public void setRequestId(Long requestId) {
-    this.requestId = requestId;
-  }
-
-  public RequestEntity getRequestEntity() {
-    return requestEntity;
-  }
-
-  public void setRequestEntity(RequestEntity request) {
-    this.requestEntity = request;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
index 33e49ac..f104259 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
@@ -200,15 +200,6 @@ public class UpgradeCatalog150 extends AbstractUpgradeCatalog {
 
     dbAccessor.createTable("hostgroup_component", columns, "blueprint_name", "hostgroup_name", "name");
 
-    // RequestResourceFilter
-    columns.clear();
-    columns.add(new DBColumnInfo("request_id", Long.class, null, null, false));
-    columns.add(new DBColumnInfo("service_name", String.class, 255, null, true));
-    columns.add(new DBColumnInfo("component_name", String.class, 255, null, true));
-    columns.add(new DBColumnInfo("hosts", byte[].class, null, null, true));
-
-    dbAccessor.createTable("requestresourcefilter", columns, "request_id", "service_name", "component_name");
-
     createQuartzTables();
 
     // ========================================================================
@@ -258,7 +249,6 @@ public class UpgradeCatalog150 extends AbstractUpgradeCatalog {
     dbAccessor.addFKConstraint("confgrouphostmapping", "FK_cghostm_configgroup_id", "config_group_id", "configgroup", "group_id", true);
     dbAccessor.addFKConstraint("confgrouphostmapping", "FK_cghostm_host_name", "host_name", "hosts", "host_name", true);
     dbAccessor.addFKConstraint("clusterconfigmapping", "FK_clustercfgmap_cluster_id", "cluster_id", "clusters", "cluster_id", true);
-    dbAccessor.addFKConstraint("requestresourcefilter", "FK_requestresourcefilter_req_id", "request_id", "request", "request_id", true);
 
     // ========================================================================
     // Finally update schema version

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 2224a85..2e97b08 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -42,8 +42,7 @@ CREATE TABLE execution_command (task_id BIGINT NOT NULL, command LONGBLOB, PRIMA
 CREATE TABLE host_role_command (task_id BIGINT NOT NULL, attempt_count SMALLINT NOT NULL, event LONGTEXT NOT NULL, exitcode INTEGER NOT NULL, host_name VARCHAR(255) NOT NULL, last_attempt_time BIGINT NOT NULL, request_id BIGINT NOT NULL, role VARCHAR(255), role_command VARCHAR(255), stage_id BIGINT NOT NULL, start_time BIGINT NOT NULL, end_time BIGINT, status VARCHAR(255), std_error LONGBLOB, std_out LONGBLOB, structured_out LONGBLOB, command_detail VARCHAR(255), custom_command_name VARCHAR(255), PRIMARY KEY (task_id));
 CREATE TABLE role_success_criteria (role VARCHAR(255) NOT NULL, request_id BIGINT NOT NULL, stage_id BIGINT NOT NULL, success_factor DOUBLE NOT NULL, PRIMARY KEY (role, request_id, stage_id));
 CREATE TABLE stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info LONGBLOB, PRIMARY KEY (stage_id, request_id));
-CREATE TABLE request (request_id BIGINT NOT NULL, cluster_id BIGINT, request_schedule_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs LONGTEXT, request_context VARCHAR(255), request_type VARCHAR(255), start_time BIGINT NOT NULL, status VARCHAR(255), PRIMARY KEY (request_id));
-CREATE TABLE requestresourcefilter (request_id BIGINT NOT NULL, service_name VARCHAR(255), component_name VARCHAR(255), hosts LONGTEXT, PRIMARY KEY (request_id, service_name, component_name));
+CREATE TABLE request (request_id BIGINT NOT NULL, cluster_id BIGINT, request_schedule_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs LONGTEXT, request_context VARCHAR(255), request_type VARCHAR(255), start_time BIGINT NOT NULL, status VARCHAR(255), target_component VARCHAR(255), target_hosts LONGTEXT, target_service VARCHAR(255), PRIMARY KEY (request_id));
 CREATE TABLE key_value_store (`key` VARCHAR(255), `value` LONGTEXT, PRIMARY KEY (`key`));
 CREATE TABLE clusterconfigmapping (type_name VARCHAR(255) NOT NULL, create_timestamp BIGINT NOT NULL, cluster_id BIGINT NOT NULL, selected INTEGER NOT NULL DEFAULT 0, version_tag VARCHAR(255) NOT NULL, user_name VARCHAR(255) NOT NULL DEFAULT '_db', PRIMARY KEY (type_name, create_timestamp, cluster_id));
 CREATE TABLE hostconfigmapping (create_timestamp BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, cluster_id BIGINT NOT NULL, type_name VARCHAR(255) NOT NULL, selected INTEGER NOT NULL DEFAULT 0, service_name VARCHAR(255), version_tag VARCHAR(255) NOT NULL, user_name VARCHAR(255) NOT NULL DEFAULT '_db', PRIMARY KEY (create_timestamp, host_name, cluster_id, type_name));
@@ -94,7 +93,6 @@ ALTER TABLE configgrouphostmapping ADD CONSTRAINT FK_configgrouphostmapping_host
 ALTER TABLE requestschedulebatchrequest ADD CONSTRAINT FK_requestschedulebatchrequest_schedule_id FOREIGN KEY (schedule_id) REFERENCES ambari.requestschedule (schedule_id);
 ALTER TABLE hostgroup ADD FOREIGN KEY (blueprint_name) REFERENCES blueprint(blueprint_name);
 ALTER TABLE hostgroup_component ADD FOREIGN KEY (blueprint_name, hostgroup_name) REFERENCES hostgroup(blueprint_name, name);
-ALTER TABLE requestresourcefilter ADD CONSTRAINT FK_requestresourcefilter_req_id FOREIGN KEY (request_id) REFERENCES request (request_id);
 
 
 INSERT INTO ambari_sequences(sequence_name, value) values ('cluster_id_seq', 1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 637a149..e0e6927 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -32,8 +32,7 @@ CREATE TABLE execution_command (task_id NUMBER(19) NOT NULL, command BLOB NULL,
 CREATE TABLE host_role_command (task_id NUMBER(19) NOT NULL, attempt_count NUMBER(5) NOT NULL, event CLOB NULL, exitcode NUMBER(10) NOT NULL, host_name VARCHAR2(255) NOT NULL, last_attempt_time NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, role VARCHAR2(255) NULL, role_command VARCHAR2(255) NULL, stage_id NUMBER(19) NOT NULL, start_time NUMBER(19) NOT NULL, end_time NUMBER(19), status VARCHAR2(255) NULL, std_error BLOB NULL, std_out BLOB NULL, structured_out BLOB NULL,  command_detail VARCHAR2(255) NULL, custom_command_name VARCHAR2(255) NULL, PRIMARY KEY (task_id));
 CREATE TABLE role_success_criteria (role VARCHAR2(255) NOT NULL, request_id NUMBER(19) NOT NULL, stage_id NUMBER(19) NOT NULL, success_factor NUMBER(19,4) NOT NULL, PRIMARY KEY (role, request_id, stage_id));
 CREATE TABLE stage (stage_id NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19) NULL, log_info VARCHAR2(255) NULL, request_context VARCHAR2(255) NULL, cluster_host_info BLOB NOT NULL, PRIMARY KEY (stage_id, request_id));
-CREATE TABLE request (request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19), request_schedule_id NUMBER(19), command_name VARCHAR(255), create_time NUMBER(19) NOT NULL, end_time NUMBER(19) NOT NULL, inputs CLOB, request_context VARCHAR(255), request_type VARCHAR(255), start_time NUMBER(19) NOT NULL, status VARCHAR(255), PRIMARY KEY (request_id));
-CREATE TABLE requestresourcefilter (request_id NUMBER(19) NOT NULL, service_name VARCHAR2(255), component_name VARCHAR2(255), hosts CLOB, PRIMARY KEY (request_id, service_name, component_name));
+CREATE TABLE request (request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19), request_schedule_id NUMBER(19), command_name VARCHAR(255), create_time NUMBER(19) NOT NULL, end_time NUMBER(19) NOT NULL, inputs CLOB, request_context VARCHAR(255), request_type VARCHAR(255), start_time NUMBER(19) NOT NULL, status VARCHAR(255), target_component VARCHAR(255), target_hosts CLOB, target_service VARCHAR(255), PRIMARY KEY (request_id));
 CREATE TABLE key_value_store ("key" VARCHAR2(255) NOT NULL, "value" CLOB NULL, PRIMARY KEY ("key"));
 CREATE TABLE clusterconfigmapping (type_name VARCHAR2(255) NOT NULL, create_timestamp NUMBER(19) NOT NULL, cluster_id NUMBER(19) NOT NULL, selected NUMBER(10) NOT NULL, version_tag VARCHAR2(255) NOT NULL, user_name VARCHAR(255) DEFAULT '_db', PRIMARY KEY (type_name, create_timestamp, cluster_id));
 CREATE TABLE hostconfigmapping (create_timestamp NUMBER(19) NOT NULL, host_name VARCHAR2(255) NOT NULL, cluster_id NUMBER(19) NOT NULL, type_name VARCHAR2(255) NOT NULL, selected NUMBER(10) NOT NULL, service_name VARCHAR2(255) NULL, version_tag VARCHAR2(255) NOT NULL, user_name VARCHAR(255) DEFAULT '_db', PRIMARY KEY (create_timestamp, host_name, cluster_id, type_name));
@@ -82,7 +81,6 @@ ALTER TABLE configgrouphostmapping ADD CONSTRAINT FK_cghm_hname FOREIGN KEY (hos
 ALTER TABLE requestschedulebatchrequest ADD CONSTRAINT FK_rsbatchrequest_schedule_id FOREIGN KEY (schedule_id) REFERENCES requestschedule (schedule_id);
 ALTER TABLE hostgroup ADD FOREIGN KEY (blueprint_name) REFERENCES ambari.blueprint(blueprint_name);
 ALTER TABLE hostgroup_component ADD FOREIGN KEY (blueprint_name, hostgroup_name) REFERENCES ambari.hostgroup(blueprint_name, name);
-ALTER TABLE requestresourcefilter ADD CONSTRAINT FK_requestresourcefilter_req_id FOREIGN KEY (request_id) REFERENCES request (request_id);
 
 INSERT INTO ambari_sequences(sequence_name, value) values ('host_role_command_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, value) values ('user_id_seq', 1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 3db04b4..60462e2 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -79,10 +79,7 @@ GRANT ALL PRIVILEGES ON TABLE ambari.role_success_criteria TO :username;
 CREATE TABLE ambari.stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info BYTEA NOT NULL, PRIMARY KEY (stage_id, request_id));
 GRANT ALL PRIVILEGES ON TABLE ambari.stage TO :username;
 
-CREATE TABLE ambari.request (request_id BIGINT NOT NULL, cluster_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs VARCHAR(32000), request_context VARCHAR(255), request_type VARCHAR(255), request_schedule_id BIGINT, start_time BIGINT NOT NULL, status VARCHAR(255), PRIMARY KEY (request_id));
-GRANT ALL PRIVILEGES ON TABLE ambari.request TO :username;
-
-CREATE TABLE ambari.requestresourcefilter (request_id BIGINT NOT NULL, service_name VARCHAR(255), component_name VARCHAR(255), hosts TEXT, PRIMARY KEY (request_id, service_name, component_name));
+CREATE TABLE ambari.request (request_id BIGINT NOT NULL, cluster_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs VARCHAR(32000), request_context VARCHAR(255), request_type VARCHAR(255), request_schedule_id BIGINT, start_time BIGINT NOT NULL, status VARCHAR(255), target_component VARCHAR(255), target_hosts TEXT, target_service VARCHAR(255), PRIMARY KEY (request_id));
 GRANT ALL PRIVILEGES ON TABLE ambari.request TO :username;
 
 CREATE TABLE ambari.ClusterHostMapping (cluster_id BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, host_name));
@@ -101,6 +98,7 @@ CREATE TABLE ambari.metainfo ("metainfo_key" VARCHAR(255), "metainfo_value" VARC
 GRANT ALL PRIVILEGES ON TABLE ambari.metainfo TO :username;
 
 CREATE TABLE ambari.ambari_sequences (sequence_name VARCHAR(255) PRIMARY KEY, "value" BIGINT NOT NULL);
+
 GRANT ALL PRIVILEGES ON TABLE ambari.ambari_sequences TO :username;
 
 CREATE TABLE ambari.configgroup (group_id BIGINT, cluster_id BIGINT NOT NULL, group_name VARCHAR(255) NOT NULL, tag VARCHAR(1024) NOT NULL, description VARCHAR(1024), create_timestamp BIGINT NOT NULL, PRIMARY KEY(group_id));
@@ -159,7 +157,6 @@ ALTER TABLE ambari.configgrouphostmapping ADD CONSTRAINT FK_configgrouphostmappi
 ALTER TABLE ambari.requestschedulebatchrequest ADD CONSTRAINT FK_requestschedulebatchrequest_schedule_id FOREIGN KEY (schedule_id) REFERENCES ambari.requestschedule (schedule_id);
 ALTER TABLE ambari.hostgroup ADD FOREIGN KEY (blueprint_name) REFERENCES ambari.blueprint(blueprint_name);
 ALTER TABLE ambari.hostgroup_component ADD FOREIGN KEY (blueprint_name, hostgroup_name) REFERENCES ambari.hostgroup(blueprint_name, name);
-ALTER TABLE ambari.requestresourcefilter ADD CONSTRAINT FK_requestresourcefilter_req_id FOREIGN KEY (request_id) REFERENCES ambari.request (request_id);
 
 
 ---------inserting some data-----------
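
All three CREATE scripts above carry the same revert: the standalone
requestresourcefilter table (and its foreign key back to request, plus the
Postgres GRANT) is dropped, and the target_component, target_hosts and
target_service columns return to the request table itself. A minimal JDBC
sketch of reading that denormalized layout follows; the table and column
names come from the DDL above, while the JDBC URL, credentials and the
request id are placeholders, not values taken from this patch.

    // Sketch only: reads the reverted target_* columns straight off the
    // request row. Connection details and the request id are placeholders.
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;

    public class RequestTargetLookup {
      public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                 "jdbc:mysql://localhost/ambari", "ambari", "password");
             PreparedStatement ps = conn.prepareStatement(
                 "SELECT target_service, target_component, target_hosts "
               + "FROM request WHERE request_id = ?")) {
          ps.setLong(1, 42L);                      // example request id
          try (ResultSet rs = ps.executeQuery()) {
            if (rs.next()) {
              System.out.println("service   = " + rs.getString("target_service"));
              System.out.println("component = " + rs.getString("target_component"));
              System.out.println("hosts     = " + rs.getString("target_hosts"));
            }
          }
        }
      }
    }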

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/resources/META-INF/persistence.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/META-INF/persistence.xml b/ambari-server/src/main/resources/META-INF/persistence.xml
index b6c1bd9..c85d79d 100644
--- a/ambari-server/src/main/resources/META-INF/persistence.xml
+++ b/ambari-server/src/main/resources/META-INF/persistence.xml
@@ -38,12 +38,12 @@
     <class>org.apache.ambari.server.orm.entities.ConfigGroupEntity</class>
     <class>org.apache.ambari.server.orm.entities.ConfigGroupConfigMappingEntity</class>
     <class>org.apache.ambari.server.orm.entities.ConfigGroupHostMappingEntity</class>
+    <class>org.apache.ambari.server.orm.entities.ActionEntity</class>
     <class>org.apache.ambari.server.orm.entities.RequestScheduleEntity</class>
     <class>org.apache.ambari.server.orm.entities.RequestScheduleBatchRequestEntity</class>
     <class>org.apache.ambari.server.orm.entities.BlueprintEntity</class>
     <class>org.apache.ambari.server.orm.entities.HostGroupEntity</class>
     <class>org.apache.ambari.server.orm.entities.HostGroupComponentEntity</class>
-    <class>org.apache.ambari.server.orm.entities.RequestResourceFilterEntity</class>
 
     <properties>
       <!--<property name="javax.persistence.jdbc.url" value="jdbc:postgresql://localhost/ambari" />-->
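
With the filter table gone from the schema, its JPA entity no longer needs
to be registered in the persistence unit, which is why the
RequestResourceFilterEntity entry disappears while ActionEntity returns to
the list. A hypothetical, stripped-down entity, not Ambari's actual
RequestEntity and with illustrative field names only, shows how the
reverted columns map onto a single request row:

    // Hypothetical illustration only; Ambari's real RequestEntity is not
    // shown in this patch. The point is that the reverted target_* columns
    // live on the request row, so no separate filter entity is required.
    import javax.persistence.Column;
    import javax.persistence.Entity;
    import javax.persistence.Id;
    import javax.persistence.Table;

    @Entity
    @Table(name = "request")
    public class RequestRowSketch {
      @Id
      @Column(name = "request_id")
      private Long requestId;

      @Column(name = "target_service")
      private String targetService;

      @Column(name = "target_component")
      private String targetComponent;

      @Column(name = "target_hosts")
      private String targetHosts;    // host list kept inline as free-form text

      // getters and setters omitted
    }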

http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index d3987b5..270353b 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -103,7 +103,9 @@
         "Requests/request_schedule",
         "Requests/type",
         "Requests/inputs",
-        "Requests/resources",
+        "Requests/target_service",
+        "Requests/target_component",
+        "Requests/target_hosts",
         "Requests/create_time",
         "Requests/start_time",
         "Requests/end_time",

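In the API property list, the composite Requests/resources key gives way to
the three flat target properties. A client-side sketch of reading them back
follows; the property names are taken from the list above, but the endpoint
URL, unauthenticated access and the exact JSON nesting under Requests are
assumptions made for illustration.

    // Sketch: fetch one request resource and print the reverted target
    // fields. URL and response nesting are assumptions, not from this patch.
    import java.net.URL;
    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class RequestTargetsClient {
      public static void main(String[] args) throws Exception {
        JsonNode root = new ObjectMapper().readTree(
            new URL("http://ambari-host:8080/api/v1/clusters/c1/requests/42"));
        JsonNode req = root.path("Requests");
        System.out.println("target_service   = " + req.path("target_service").asText());
        System.out.println("target_component = " + req.path("target_component").asText());
        System.out.println("target_hosts     = " + req.path("target_hosts").asText());
      }
    }
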
http://git-wip-us.apache.org/repos/asf/ambari/blob/668d4c26/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
index 344891d..d052673 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
@@ -30,7 +30,6 @@ import org.apache.ambari.server.agent.ActionQueue;
 import org.apache.ambari.server.agent.CommandReport;
 import org.apache.ambari.server.controller.ExecuteActionRequest;
 import org.apache.ambari.server.controller.HostsMap;
-import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.ExecutionCommandDAO;
@@ -367,11 +366,8 @@ public class TestActionDBAccessorImpl {
             hostname, System.currentTimeMillis()), "cluster1", "HBASE");
     List<Stage> stages = new ArrayList<Stage>();
     stages.add(s);
-    final RequestResourceFilter resourceFilter = new RequestResourceFilter("HBASE", "HBASE_MASTER", null);
-    List<RequestResourceFilter> resourceFilters = new
-      ArrayList<RequestResourceFilter>() {{ add(resourceFilter); }};
-    ExecuteActionRequest executeActionRequest = new ExecuteActionRequest
-      ("cluster1", null, actionName, resourceFilters, null);
+    ExecuteActionRequest executeActionRequest = new ExecuteActionRequest("cluster1", null, actionName, "HBASE",
+        "HBASE_MASTER", null, null);
     Request request = new Request(stages, clusters);
     db.persistActions(request);
   }
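
The test now exercises the reverted positional constructor instead of the
filter-list form removed above. For reference, a consolidated sketch of the
two shapes, with signatures copied from the diff; the two never coexist in
one source tree, and the cluster, action and target values are simply the
ones the test happens to use.

    // Consolidated view of the two ExecuteActionRequest shapes touched by
    // this revert. Illustration only: RequestResourceFilter exists before
    // the revert and not after, so both forms never compile together.
    import java.util.ArrayList;
    import java.util.List;
    import org.apache.ambari.server.controller.ExecuteActionRequest;
    import org.apache.ambari.server.controller.internal.RequestResourceFilter;

    public class ExecuteActionRequestShapes {

      // Positional shape restored by this revert.
      static ExecuteActionRequest positional(String actionName) {
        return new ExecuteActionRequest(
            "cluster1", null, actionName, "HBASE", "HBASE_MASTER", null, null);
      }

      // Filter-list shape that this revert removes.
      static ExecuteActionRequest filterBased(String actionName) {
        RequestResourceFilter filter =
            new RequestResourceFilter("HBASE", "HBASE_MASTER", null);
        List<RequestResourceFilter> filters = new ArrayList<RequestResourceFilter>();
        filters.add(filter);
        return new ExecuteActionRequest("cluster1", null, actionName, filters, null);
      }
    }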