You are viewing a plain text version of this content. The canonical (HTML) link for this message is available in the commits@ambari.apache.org mailing list archive.
Posted to commits@ambari.apache.org by sw...@apache.org on 2014/02/25 19:20:32 UTC

[1/3] AMBARI-4791. API call to restart all components on one or more hosts should result in one request. (swagle)

Repository: ambari
Updated Branches:
  refs/heads/trunk 0b155481f -> c6987eeef


http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index dc4c5a5..2e114ef 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -73,6 +73,7 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.internal.ComponentResourceProviderTest;
 import org.apache.ambari.server.controller.internal.HostResourceProviderTest;
+import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.controller.internal.ServiceResourceProviderTest;
 import org.apache.ambari.server.customactions.ActionDefinition;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
@@ -2348,8 +2349,12 @@ public class AmbariManagementControllerTest {
       put("excluded_hosts", "h2");
       put("align_maintenance_state", "true");
     }};
-    ExecuteActionRequest request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null, "HBASE", "HBASE_MASTER",
-        null, params);
+    RequestResourceFilter resourceFilter = new RequestResourceFilter("HBASE", "HBASE_MASTER", null);
+    List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
+    resourceFilters.add(resourceFilter);
+
+    ExecuteActionRequest request = new ExecuteActionRequest(clusterName,
+      "DECOMMISSION", null, resourceFilters, params);
 
     Map<String, String> requestProperties = new HashMap<String, String>();
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
@@ -2390,10 +2395,11 @@ public class AmbariManagementControllerTest {
           put("slave_type", "HBASE_REGIONSERVER");
           put("align_maintenance_state", "true");
         }};
-    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null, "HBASE", "HBASE_MASTER", null, params);
+    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", params);
+    resourceFilter = new RequestResourceFilter("HBASE", "HBASE_MASTER", null);
+    request.getResourceFilters().add(resourceFilter);
 
-    response = controller.createAction(request,
-        requestProperties);
+    response = controller.createAction(request, requestProperties);
 
     storedTasks = actionDB.getRequestTasks(response.getRequestId());
     execCmd = storedTasks.get(0).getExecutionCommandWrapper
@@ -2416,8 +2422,8 @@ public class AmbariManagementControllerTest {
     params = new HashMap<String, String>() {{
       put("included_hosts", "h2");
     }};
-    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null, "HBASE", "HBASE_MASTER",
-        null, params);
+    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null,
+      resourceFilters, params);
 
     response = controller.createAction(request,
         requestProperties);
@@ -3842,8 +3848,11 @@ public class AmbariManagementControllerTest {
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
 
     ArrayList<String> hosts = new ArrayList<String>() {{add("h1");}};
+    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", "DATANODE", hosts);
+    List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
+    resourceFilters.add(resourceFilter);
 
-    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", null, "a1", "HDFS", "DATANODE", hosts, params);
+    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
     RequestStatusResponse response = controller.createAction(actionRequest, requestProperties);
     assertEquals(1, response.getTasks().size());
     ShortTaskStatus taskStatus = response.getTasks().get(0);
@@ -3859,10 +3868,13 @@ public class AmbariManagementControllerTest {
     Assert.assertEquals("h1", task.getHostName());
     ExecutionCommand cmd = task.getExecutionCommandWrapper().getExecutionCommand();
     Assert.assertTrue(cmd.getCommandParams().containsKey("test"));
-    Assert.assertEquals(cmd.getServiceName(), "HDFS");
-    Assert.assertEquals(cmd.getComponentName(), "DATANODE");
+    Assert.assertEquals("HDFS", cmd.getServiceName());
+    Assert.assertEquals("DATANODE", cmd.getComponentName());
 
-    actionRequest = new ExecuteActionRequest("c1", null, "a2", "", "", null, params);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("", "", null);
+    resourceFilters.add(resourceFilter);
+    actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, params);
     response = controller.createAction(actionRequest, requestProperties);
     assertEquals(2, response.getTasks().size());
 
@@ -3870,18 +3882,27 @@ public class AmbariManagementControllerTest {
     task = storedTasks2.get(1);
     Assert.assertEquals(RoleCommand.ACTIONEXECUTE, task.getRoleCommand());
     Assert.assertEquals("a2", task.getRole().name());
-    HashSet<String> expectedHosts = new HashSet<String>(){{add("h2"); add("h1");}};
-    HashSet<String> actualHosts = new HashSet<String>(){{add(storedTasks2.get(1).getHostName()); add(storedTasks2
-        .get(0).getHostName());}};
+    HashSet<String> expectedHosts = new HashSet<String>() {{
+      add("h2");
+      add("h1");
+    }};
+    HashSet<String> actualHosts = new HashSet<String>() {{
+      add(storedTasks2.get(1).getHostName());
+      add(storedTasks2.get(0).getHostName());
+    }};
     Assert.assertEquals(expectedHosts, actualHosts);
 
     cmd = task.getExecutionCommandWrapper().getExecutionCommand();
     Assert.assertTrue(cmd.getCommandParams().containsKey("test"));
-    Assert.assertEquals(cmd.getServiceName(), "HDFS");
-    Assert.assertEquals(cmd.getComponentName(), "DATANODE");
+    Assert.assertEquals("HDFS", cmd.getServiceName());
+    Assert.assertEquals("DATANODE", cmd.getComponentName());
 
     hosts = new ArrayList<String>() {{add("h3");}};
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", "", "", hosts, params);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("", "", hosts);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
     response = controller.createAction(actionRequest, requestProperties);
     assertEquals(1, response.getTasks().size());
     taskStatus = response.getTasks().get(0);
@@ -3951,10 +3972,13 @@ public class AmbariManagementControllerTest {
     Map<String, String> params = new HashMap<String, String>() {{
       put("test", "test");
     }};
+    RequestResourceFilter resourceFilter = new RequestResourceFilter(
+      "HDFS",
+      "HDFS_CLIENT",
+      new ArrayList<String>() {{ add("h1"); }});
     ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1",
-      "RESTART", null, "HDFS", "HDFS_CLIENT",
-      new ArrayList<String>() {{ add("h1"); }},
-      params);
+      "RESTART", params);
+    actionRequest.getResourceFilters().add(resourceFilter);
 
     Map<String, String> requestProperties = new HashMap<String, String>();
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
@@ -4031,34 +4055,50 @@ public class AmbariManagementControllerTest {
     Map<String, String> params = new HashMap<String, String>() {{
       put("test", "test");
     }};
-    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", "NON_EXISTENT_CHECK", "HDFS", params);
+
+    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
+
+    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", "NON_EXISTENT_CHECK", params);
+    actionRequest.getResourceFilters().add(resourceFilter);
 
     Map<String, String> requestProperties = new HashMap<String, String>();
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
 
     expectActionCreationErrorWithMessage(actionRequest, requestProperties, "Unsupported action");
 
-    actionRequest = new ExecuteActionRequest("c1", "NON_EXISTENT_SERVICE_CHECK", "HDFS", params);
-    expectActionCreationErrorWithMessage(actionRequest, requestProperties,
-        "Unsupported action");
+    //actionRequest = new ExecuteActionRequest("c1", "NON_EXISTENT_SERVICE_CHECK", "HDFS", params);
+    //expectActionCreationErrorWithMessage(actionRequest, requestProperties, "Unsupported action");
 
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION_DATANODE", "HDFS", params);
-    expectActionCreationErrorWithMessage(actionRequest, requestProperties,
-        "Unsupported action DECOMMISSION_DATANODE for Service: HDFS and Component: null");
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION_DATANODE", params);
+    actionRequest.getResourceFilters().add(resourceFilter);
 
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", "HDFS", params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
-        "Unsupported action DECOMMISSION for Service: HDFS and Component: null");
+      "Unsupported action DECOMMISSION_DATANODE for Service: HDFS and Component: null");
+
+    //actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", "HDFS", params);
+    //expectActionCreationErrorWithMessage(actionRequest, requestProperties, "Unsupported action DECOMMISSION for Service: HDFS and Component: null");
+
+    resourceFilter = new RequestResourceFilter("HDFS", "HDFS_CLIENT", null);
+    List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params);
 
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "HDFS", "HDFS_CLIENT", null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Unsupported action DECOMMISSION for Service: HDFS and Component: HDFS_CLIENT");
 
-    actionRequest = new ExecuteActionRequest("c1", null, "DECOMMISSION_DATANODE", "HDFS", null, null, params);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("HDFS", null, null);
+    resourceFilters.add(resourceFilter);
+    actionRequest = new ExecuteActionRequest("c1", null, "DECOMMISSION_DATANODE", resourceFilters, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action DECOMMISSION_DATANODE does not exist");
 
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "YARN", "RESOURCEMANAGER", null, params);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("YARN", "RESOURCEMANAGER", null);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Service not found, clusterName=c1, serviceName=YARN");
 
@@ -4066,7 +4106,12 @@ public class AmbariManagementControllerTest {
       put("included_hosts", "h1,h2");
       put("excluded_hosts", "h1,h3");
     }};
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "HDFS", "NAMENODE", null, params2);
+
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Same host cannot be specified for inclusion as well as exclusion. Hosts: [h1]");
 
@@ -4075,13 +4120,21 @@ public class AmbariManagementControllerTest {
       put("excluded_hosts", "h4, h3");
       put("slave_type", "HDFS_CLIENT");
     }};
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "HDFS", "NAMENODE", null, params2);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Component HDFS_CLIENT is not supported for decommissioning.");
 
     List<String> hosts = new ArrayList<String>();
     hosts.add("h6");
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "HDFS", "NAMENODE", hosts, params2);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", hosts);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Decommission command cannot be issued with target host(s) specified.");
 
@@ -4089,7 +4142,11 @@ public class AmbariManagementControllerTest {
     params2 = new HashMap<String, String>() {{
       put("excluded_hosts", "h1 ");
     }};
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "HDFS", "NAMENODE", null, params2);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Component DATANODE on host h1 cannot be decommissioned as its not in STARTED state");
 
@@ -4097,7 +4154,7 @@ public class AmbariManagementControllerTest {
       put("excluded_hosts", "h1 ");
       put("mark_draining_only", "true");
     }};
-    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, "HDFS", "NAMENODE", null, params2);
+    actionRequest = new ExecuteActionRequest("c1", "DECOMMISSION", null, resourceFilters, params2);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "mark_draining_only is not a valid parameter for NAMENODE");
 
@@ -4117,58 +4174,91 @@ public class AmbariManagementControllerTest {
         "a4", ActionType.SYSTEM, "", "HIVE", "", "Does file exist",
         TargetHostType.ANY, Short.valueOf("100")));
 
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", null, null, null, null);
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", null, null);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a1 requires input 'test' that is not provided");
 
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", null, null, null, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a1 requires input 'dirName' that is not provided");
 
     params.put("dirName", "dirName");
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", null, null, null, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", null, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a1 requires explicit target host(s)");
 
-    actionRequest = new ExecuteActionRequest("c1", null, "a2", "HIVE", null, null, params);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("HIVE", null, null);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a2 targets service HIVE that does not match with expected HDFS");
 
-    actionRequest = new ExecuteActionRequest("c1", null, "a2", "HDFS", "HDFS_CLIENT", null, params);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("HDFS", "HDFS_CLIENT", null);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a2 targets component HDFS_CLIENT that does not match with expected DATANODE");
 
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", "HDFS2", "HDFS_CLIENT", null, params);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("HDFS2", "HDFS_CLIENT", null);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a1 targets service HDFS2 that does not exist");
 
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", "HDFS", "HDFS_CLIENT2", null, params);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("HDFS", "HDFS_CLIENT2", null);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a1 targets component HDFS_CLIENT2 that does not exist");
 
-    actionRequest = new ExecuteActionRequest("c1", null, "a1", "", "HDFS_CLIENT2", null, params);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("", "HDFS_CLIENT2", null);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a1 targets component HDFS_CLIENT2 without specifying the target service");
 
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("", "", null);
+    resourceFilters.add(resourceFilter);
+
     // targets a service that is not a member of the stack (e.g. MR not in HDP-2)
-    actionRequest = new ExecuteActionRequest("c1", null, "a3", "", "", null, params);
+    actionRequest = new ExecuteActionRequest("c1", null, "a3", resourceFilters, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Action a3 targets service MAPREDUCE that does not exist");
 
     hosts = new ArrayList<String>();
     hosts.add("h6");
-    actionRequest = new ExecuteActionRequest("c1", null, "a2", "", "", hosts, params);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("", "", hosts);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", null, "a2", resourceFilters, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Request specifies host h6 but its not a valid host based on the target service=HDFS and component=DATANODE");
 
-    actionRequest = new ExecuteActionRequest("c1", null, "a4", "HIVE", "", null, params);
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("HIVE", "", null);
+    resourceFilters.add(resourceFilter);
+
+    actionRequest = new ExecuteActionRequest("c1", null, "a4", resourceFilters, params);
     expectActionCreationErrorWithMessage(actionRequest, requestProperties,
         "Suitable hosts not found, component=, service=HIVE, cluster=c1, actionName=a4");
 
   }
 
   private void expectActionCreationErrorWithMessage(ExecuteActionRequest actionRequest,
-                                                    Map<String, String> requestProperties, String message) {
+                                                    Map<String, String> requestProperties,
+                                                    String message) {
     try {
       RequestStatusResponse response = controller.createAction(actionRequest, requestProperties);
       Assert.fail("createAction should fail");
@@ -4220,7 +4310,9 @@ public class AmbariManagementControllerTest {
     Map<String, String> params = new HashMap<String, String>() {{
       put("test", "test");
     }};
-    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", Role.HDFS_SERVICE_CHECK.name(), "HDFS", params);
+    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", Role.HDFS_SERVICE_CHECK.name(), params);
+    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
+    actionRequest.getResourceFilters().add(resourceFilter);
 
     Map<String, String> requestProperties = new HashMap<String, String>();
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
@@ -4249,7 +4341,9 @@ public class AmbariManagementControllerTest {
     assertNull(hostRoleCommand.getCustomCommandName());
 
     assertEquals(task.getTaskId(), hostRoleCommand.getTaskId());
-    assertEquals(actionRequest.getServiceName(), hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getServiceName());
+    assertNotNull(actionRequest.getResourceFilters());
+    RequestResourceFilter requestResourceFilter = actionRequest.getResourceFilters().get(0);
+    assertEquals(resourceFilter.getServiceName(), hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getServiceName());
     assertEquals(actionRequest.getClusterName(), hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getClusterName());
     assertEquals(actionRequest.getCommandName(), hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getRole());
     assertEquals(Role.HDFS_CLIENT.name(), hostRoleCommand.getEvent().getEvent().getServiceComponentName());
@@ -4257,7 +4351,10 @@ public class AmbariManagementControllerTest {
     assertNotNull(hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getConfigurations());
     assertEquals(2, hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getConfigurations().size());
     assertEquals(requestProperties.get(REQUEST_CONTEXT_PROPERTY), stage.getRequestContext());
-    actionRequest = new ExecuteActionRequest("c1", Role.MAPREDUCE_SERVICE_CHECK.name(), "MAPREDUCE", null);
+
+    actionRequest = new ExecuteActionRequest("c1", Role.MAPREDUCE_SERVICE_CHECK.name(), null);
+    resourceFilter = new RequestResourceFilter("MAPREDUCE", null, null);
+    actionRequest.getResourceFilters().add(resourceFilter);
 
     response = controller.createAction(actionRequest, requestProperties);
 
@@ -5562,8 +5659,9 @@ public class AmbariManagementControllerTest {
     }
     Assert.assertEquals("Expect only one service check.", 1, commandCount);
 
-    ExecuteActionRequest actionRequest = new ExecuteActionRequest("foo1", Role.HDFS_SERVICE_CHECK.name(),
-        null, "HDFS", null, null, null);
+    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
+    ExecuteActionRequest actionRequest = new ExecuteActionRequest("foo1", Role.HDFS_SERVICE_CHECK.name(), null);
+    actionRequest.getResourceFilters().add(resourceFilter);
     Map<String, String> requestProperties = new HashMap<String, String>();
 
     RequestStatusResponse response = controller.createAction(actionRequest, requestProperties);
@@ -5959,8 +6057,9 @@ public class AmbariManagementControllerTest {
       put("excluded_hosts", "h2");
       put("align_maintenance_state", "true");
     }};
-    ExecuteActionRequest request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null, "HDFS", "NAMENODE",
-        null, params);
+    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
+    ExecuteActionRequest request = new ExecuteActionRequest(clusterName, "DECOMMISSION", params);
+    request.getResourceFilters().add(resourceFilter);
 
     Map<String, String> requestProperties = new HashMap<String, String>();
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
@@ -5992,7 +6091,9 @@ public class AmbariManagementControllerTest {
       put("excluded_hosts", "h1");
       put("align_maintenance_state", "true");
     }};
-    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null, "HDFS", "NAMENODE", null, params);
+    resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
+    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", params);
+    request.getResourceFilters().add(resourceFilter);
 
     response = controller.createAction(request,
         requestProperties);
@@ -6033,7 +6134,9 @@ public class AmbariManagementControllerTest {
       put("included_hosts", "h1 , h2");
       put("align_maintenance_state", "true");
     }};
-    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null, "HDFS", "NAMENODE", null, params);
+    resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
+    request = new ExecuteActionRequest(clusterName, "DECOMMISSION", params);
+    request.getResourceFilters().add(resourceFilter);
 
     response = controller.createAction(request,
         requestProperties);
@@ -6068,6 +6171,209 @@ public class AmbariManagementControllerTest {
   }
 
   @Test
+  public void testResourceFiltersWithCustomActions() throws AmbariException {
+    setupClusterWithHosts("c1", "HDP-2.0.6",
+      new ArrayList<String>() {{
+        add("h1");
+        add("h2");
+        add("h3");
+      }},
+      "centos6");
+
+    Cluster cluster = clusters.getCluster("c1");
+    cluster.setDesiredStackVersion(new StackId("HDP-2.0.6"));
+    cluster.setCurrentStackVersion(new StackId("HDP-2.0.6"));
+
+    ConfigFactory cf = injector.getInstance(ConfigFactory.class);
+    Config config1 = cf.createNew(cluster, "global",
+      new HashMap<String, String>() {{
+        put("key1", "value1");
+      }});
+    config1.setVersionTag("version1");
+
+    Config config2 = cf.createNew(cluster, "core-site",
+      new HashMap<String, String>() {{
+        put("key1", "value1");
+      }});
+    config2.setVersionTag("version1");
+
+    cluster.addConfig(config1);
+    cluster.addConfig(config2);
+
+    Service hdfs = cluster.addService("HDFS");
+    hdfs.persist();
+
+    Service mapred = cluster.addService("YARN");
+    mapred.persist();
+
+    hdfs.addServiceComponent(Role.HDFS_CLIENT.name()).persist();
+    hdfs.addServiceComponent(Role.NAMENODE.name()).persist();
+    hdfs.addServiceComponent(Role.DATANODE.name()).persist();
+
+    mapred.addServiceComponent(Role.RESOURCEMANAGER.name()).persist();
+
+    hdfs.getServiceComponent(Role.HDFS_CLIENT.name()).addServiceComponentHost("h1").persist();
+    hdfs.getServiceComponent(Role.NAMENODE.name()).addServiceComponentHost("h1").persist();
+    hdfs.getServiceComponent(Role.DATANODE.name()).addServiceComponentHost("h1").persist();
+    hdfs.getServiceComponent(Role.DATANODE.name()).addServiceComponentHost("h2").persist();
+
+    controller.getAmbariMetaInfo().addActionDefinition(new ActionDefinition(
+      "a1", ActionType.SYSTEM, "", "HDFS", "", "Some custom action.",
+      TargetHostType.ALL, Short.valueOf("100")));
+
+    Map<String, String> params = new HashMap<String, String>() {{
+      put("test", "test");
+    }};
+
+    Map<String, String> requestProperties = new HashMap<String, String>();
+    requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
+
+    List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
+    ArrayList<String> hosts = new ArrayList<String>() {{ add("h2"); }};
+    RequestResourceFilter resourceFilter1 = new RequestResourceFilter("HDFS", "DATANODE", hosts);
+
+    hosts = new ArrayList<String>() {{ add("h1"); }};
+    RequestResourceFilter resourceFilter2 = new RequestResourceFilter("HDFS", "NAMENODE", hosts);
+
+    resourceFilters.add(resourceFilter1);
+    resourceFilters.add(resourceFilter2);
+
+    ExecuteActionRequest actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+    RequestStatusResponse response = null;
+    try {
+      response = controller.createAction(actionRequest, requestProperties);
+    } catch (AmbariException ae) {
+      LOG.info("Expected exception.", ae);
+      Assert.assertTrue(ae.getMessage().contains("Custom action definition only " +
+        "allows one resource filter to be specified"));
+    }
+    resourceFilters.remove(resourceFilter1);
+    actionRequest = new ExecuteActionRequest("c1", null, "a1", resourceFilters, params);
+    response = controller.createAction(actionRequest, requestProperties);
+
+    assertEquals(1, response.getTasks().size());
+    HostRoleCommand nnCommand = null;
+
+    for (HostRoleCommand hrc : actionDB.getRequestTasks(response.getRequestId())) {
+      if (hrc.getHostName().equals("h1")) {
+        nnCommand = hrc;
+      }
+    }
+
+    Assert.assertNotNull(nnCommand);
+    ExecutionCommand cmd = nnCommand.getExecutionCommandWrapper().getExecutionCommand();
+    Assert.assertEquals("a1", cmd.getRole());
+    Assert.assertTrue(cmd.getCommandParams().containsKey("test"));
+  }
+
+  @Test
+  public void testResourceFiltersWithCustomCommands() throws AmbariException {
+    setupClusterWithHosts("c1", "HDP-2.0.6",
+      new ArrayList<String>() {{
+        add("h1");
+        add("h2");
+        add("h3");
+      }},
+      "centos6");
+
+    Cluster cluster = clusters.getCluster("c1");
+    cluster.setDesiredStackVersion(new StackId("HDP-2.0.6"));
+    cluster.setCurrentStackVersion(new StackId("HDP-2.0.6"));
+
+    ConfigFactory cf = injector.getInstance(ConfigFactory.class);
+    Config config1 = cf.createNew(cluster, "global",
+      new HashMap<String, String>() {{
+        put("key1", "value1");
+      }});
+    config1.setVersionTag("version1");
+
+    Config config2 = cf.createNew(cluster, "core-site",
+      new HashMap<String, String>() {{
+        put("key1", "value1");
+      }});
+    config2.setVersionTag("version1");
+
+    cluster.addConfig(config1);
+    cluster.addConfig(config2);
+
+    Service hdfs = cluster.addService("HDFS");
+    hdfs.persist();
+
+    Service mapred = cluster.addService("YARN");
+    mapred.persist();
+
+    hdfs.addServiceComponent(Role.HDFS_CLIENT.name()).persist();
+    hdfs.addServiceComponent(Role.NAMENODE.name()).persist();
+    hdfs.addServiceComponent(Role.DATANODE.name()).persist();
+
+    mapred.addServiceComponent(Role.RESOURCEMANAGER.name()).persist();
+
+    hdfs.getServiceComponent(Role.HDFS_CLIENT.name()).addServiceComponentHost("h1").persist();
+    hdfs.getServiceComponent(Role.NAMENODE.name()).addServiceComponentHost("h1").persist();
+    hdfs.getServiceComponent(Role.DATANODE.name()).addServiceComponentHost("h1").persist();
+    hdfs.getServiceComponent(Role.DATANODE.name()).addServiceComponentHost("h2").persist();
+
+    mapred.getServiceComponent(Role.RESOURCEMANAGER.name()).addServiceComponentHost("h2").persist();
+
+    Map<String, String> params = new HashMap<String, String>() {{
+      put("test", "test");
+    }};
+
+    Map<String, String> requestProperties = new HashMap<String, String>();
+    requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
+
+    // Test multiple restarts
+    List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
+    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS",
+      Role.DATANODE.name(), new ArrayList<String>() {{ add("h1"); add("h2"); }});
+    resourceFilters.add(resourceFilter);
+    resourceFilter = new RequestResourceFilter("YARN",
+      Role.RESOURCEMANAGER.name(), new ArrayList<String>() {{ add("h2"); }});
+    resourceFilters.add(resourceFilter);
+
+    ExecuteActionRequest request = new ExecuteActionRequest("c1",
+      "RESTART", null, resourceFilters, params);
+
+    RequestStatusResponse response = controller.createAction(request, requestProperties);
+    Assert.assertEquals(3, response.getTasks().size());
+    List<HostRoleCommand> storedTasks = actionDB.getRequestTasks(response.getRequestId());
+
+    Assert.assertNotNull(storedTasks);
+    int expectedRestartCount = 0;
+    for (HostRoleCommand hrc : storedTasks) {
+      Assert.assertEquals("RESTART", hrc.getCustomCommandName());
+      if (hrc.getHostName().equals("h1") && hrc.getRole().equals(Role.DATANODE)) {
+        expectedRestartCount++;
+      } else if(hrc.getHostName().equals("h2")) {
+        if (hrc.getRole().equals(Role.DATANODE)) {
+          expectedRestartCount++;
+        } else if (hrc.getRole().equals(Role.RESOURCEMANAGER)) {
+          expectedRestartCount++;
+        }
+      }
+    }
+
+    Assert.assertEquals("Restart 2 datanodes and 1 Resourcemanager.", 3, expectedRestartCount);
+
+    // Test service checks - specific host
+    resourceFilters.clear();
+    resourceFilter = new RequestResourceFilter("HDFS", null,
+      new ArrayList<String>() {{ add("h2"); }});
+    resourceFilters.add(resourceFilter);
+    request = new ExecuteActionRequest("c1", Role.HDFS_SERVICE_CHECK.name(),
+      null, resourceFilters, null);
+    response = controller.createAction(request, requestProperties);
+
+    Assert.assertEquals(1, response.getTasks().size());
+    storedTasks = actionDB.getRequestTasks(response.getRequestId());
+    Assert.assertNotNull(storedTasks);
+    Assert.assertEquals(Role.HDFS_SERVICE_CHECK.name(),
+      storedTasks.get(0).getRole().name());
+    Assert.assertEquals("h2", storedTasks.get(0).getHostName());
+  }
+
+
+  @Test
   public void testConfigsAttachedToServiceChecks() throws AmbariException {
     String clusterName = "foo1";
     createCluster(clusterName);
@@ -6563,8 +6869,9 @@ public class AmbariManagementControllerTest {
       put("test", "test");
       put("excluded_hosts", " h1 ");
     }};
-    ExecuteActionRequest request = new ExecuteActionRequest(clusterName, "DECOMMISSION", null,
-        "HDFS", "NAMENODE", null, params);
+    RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", "NAMENODE", null);
+    ExecuteActionRequest request = new ExecuteActionRequest(clusterName, "DECOMMISSION", params);
+    request.getResourceFilters().add(resourceFilter);
 
     Map<String, String> requestProperties = new HashMap<String, String>();
     requestProperties.put(REQUEST_CONTEXT_PROPERTY, "Called from a test");
@@ -8475,7 +8782,9 @@ public class AmbariManagementControllerTest {
 
       amc.createHostComponents(componentHostRequests);
 
-      ExecuteActionRequest ar = new ExecuteActionRequest(CLUSTER_NAME, Role.HDFS_SERVICE_CHECK.name(), "HDFS", null);
+      RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
+      ExecuteActionRequest ar = new ExecuteActionRequest(CLUSTER_NAME, Role.HDFS_SERVICE_CHECK.name(), null);
+      ar.getResourceFilters().add(resourceFilter);
       amc.createAction(ar, null);
 
       // change mind, delete the cluster

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/test/java/org/apache/ambari/server/controller/MaintenanceStateHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/MaintenanceStateHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/MaintenanceStateHelperTest.java
index 3f46ce0..489499a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/MaintenanceStateHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/MaintenanceStateHelperTest.java
@@ -28,6 +28,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Host;
@@ -93,9 +94,12 @@ public class MaintenanceStateHelperTest {
     map = rpCapture.getValue();
     
     Assert.assertEquals("nagios_update_ignore", ear.getActionName());
-    Assert.assertEquals("ACTIONEXECUTE", ear.getCommandName());
-    Assert.assertEquals("NAGIOS", ear.getServiceName());
-    Assert.assertEquals("NAGIOS_SERVER", ear.getComponentName());
+    Assert.assertEquals(null, ear.getCommandName());
+    Assert.assertEquals(1, ear.getResourceFilters().size());
+    RequestResourceFilter resourceFilter = ear.getResourceFilters().get(0);
+
+    Assert.assertEquals("NAGIOS", resourceFilter.getServiceName());
+    Assert.assertEquals("NAGIOS_SERVER", resourceFilter.getComponentName());
     Assert.assertEquals("c1", ear.getClusterName());
     Assert.assertTrue(map.containsKey("context"));  
   }
@@ -147,9 +151,11 @@ public class MaintenanceStateHelperTest {
     rpCapture.getValue();
     
     Assert.assertEquals("nagios_update_ignore", ear.getActionName());
-    Assert.assertEquals("ACTIONEXECUTE", ear.getCommandName());
-    Assert.assertEquals("NAGIOS", ear.getServiceName());
-    Assert.assertEquals("NAGIOS_SERVER", ear.getComponentName());
+    Assert.assertEquals(null, ear.getCommandName());
+    Assert.assertEquals(1, ear.getResourceFilters().size());
+    RequestResourceFilter resourceFilter = ear.getResourceFilters().get(0);
+    Assert.assertEquals("NAGIOS", resourceFilter.getServiceName());
+    Assert.assertEquals("NAGIOS_SERVER", resourceFilter.getComponentName());
     Assert.assertEquals("c1", ear.getClusterName());
     Assert.assertTrue(map.containsKey("context"));    
   }
@@ -203,9 +209,11 @@ public class MaintenanceStateHelperTest {
     map = rpCapture.getValue();
     
     Assert.assertEquals("nagios_update_ignore", ear.getActionName());
-    Assert.assertEquals("ACTIONEXECUTE", ear.getCommandName());
-    Assert.assertEquals("NAGIOS", ear.getServiceName());
-    Assert.assertEquals("NAGIOS_SERVER", ear.getComponentName());
+    Assert.assertEquals(null, ear.getCommandName());
+    Assert.assertEquals(1, ear.getResourceFilters().size());
+    RequestResourceFilter resourceFilter = ear.getResourceFilters().get(0);
+    Assert.assertEquals("NAGIOS", resourceFilter.getServiceName());
+    Assert.assertEquals("NAGIOS_SERVER", resourceFilter.getComponentName());
     Assert.assertEquals("c1", ear.getClusterName());
     Assert.assertTrue(map.containsKey("context"));
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
index e279342..d01e2e5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestResourceProviderTest.java
@@ -18,6 +18,8 @@
 
 package org.apache.ambari.server.controller.internal;
 
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
@@ -38,6 +40,7 @@ import org.easymock.Capture;
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.lang.reflect.Type;
 import java.util.*;
 
 import static org.easymock.EasyMock.capture;
@@ -763,10 +766,20 @@ public class RequestResourceProviderTest {
 
     properties.put(RequestResourceProvider.REQUEST_CLUSTER_NAME_PROPERTY_ID, "c1");
 
+    final RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
+    List<RequestResourceFilter> resourceFilters =
+      new ArrayList<RequestResourceFilter>() {{
+        add(resourceFilter);
+      }};
+
+    Type listType = new TypeToken<List<RequestResourceFilter>>(){}.getType();
+    String filterJson = new Gson().toJson(resourceFilters, listType);
+
+    properties.put(RequestResourceProvider.REQUEST_RESOURCE_FILTER_ID, filterJson);
+
     propertySet.add(properties);
 
     Map<String, String> requestInfoProperties = new HashMap<String, String>();
-    requestInfoProperties.put(RequestResourceProvider.SERVICE_NAME_ID, "HDFS");
     requestInfoProperties.put(RequestResourceProvider.COMMAND_ID, "HDFS_SERVICE_CHECK");
 
     // create the request
@@ -777,14 +790,19 @@ public class RequestResourceProviderTest {
         PropertyHelper.getKeyPropertyIds(type),
         managementController);
     provider.createResources(request);
+    ExecuteActionRequest capturedRequest = actionRequest.getValue();
+
     Assert.assertTrue(actionRequest.hasCaptured());
-    Assert.assertTrue(actionRequest.getValue().isCommand());
-    Assert.assertEquals(null, actionRequest.getValue().getActionName());
-    Assert.assertEquals("HDFS_SERVICE_CHECK", actionRequest.getValue().getCommandName());
-    Assert.assertEquals("HDFS", actionRequest.getValue().getServiceName());
-    Assert.assertEquals(null, actionRequest.getValue().getComponentName());
-    Assert.assertNotNull(actionRequest.getValue().getHosts());
-    Assert.assertEquals(0, actionRequest.getValue().getHosts().size());
+    Assert.assertTrue(capturedRequest.isCommand());
+    Assert.assertEquals(null, capturedRequest.getActionName());
+    Assert.assertEquals("HDFS_SERVICE_CHECK", capturedRequest.getCommandName());
+    Assert.assertNotNull(capturedRequest.getResourceFilters());
+    Assert.assertEquals(1, capturedRequest.getResourceFilters().size());
+    RequestResourceFilter capturedResourceFilter = capturedRequest.getResourceFilters().get(0);
+    Assert.assertEquals("HDFS", capturedResourceFilter.getServiceName());
+    Assert.assertEquals(null, capturedResourceFilter.getComponentName());
+    Assert.assertNotNull(capturedResourceFilter.getHostNames());
+    Assert.assertEquals(0, capturedResourceFilter.getHostNames().size());
     Assert.assertEquals(0, actionRequest.getValue().getParameters().size());
   }
 
@@ -811,13 +829,26 @@ public class RequestResourceProviderTest {
 
     properties.put(RequestResourceProvider.REQUEST_CLUSTER_NAME_PROPERTY_ID, "c1");
 
+    final RequestResourceFilter resourceFilter = new RequestResourceFilter("HDFS", null, null);
+    resourceFilter.getHostNames().add("host1");
+    resourceFilter.getHostNames().add("host2");
+    resourceFilter.getHostNames().add("host3");
+    List<RequestResourceFilter> resourceFilters =
+      new ArrayList<RequestResourceFilter>() {{
+        add(resourceFilter);
+      }};
+
+    Type listType = new TypeToken<List<RequestResourceFilter>>(){}.getType();
+    String filterJson = new Gson().toJson(resourceFilters, listType);
+
+    properties.put(RequestResourceProvider.REQUEST_RESOURCE_FILTER_ID, filterJson);
+
     propertySet.add(properties);
 
     Map<String, String> requestInfoProperties = new HashMap<String, String>();
-    requestInfoProperties.put(RequestResourceProvider.SERVICE_NAME_ID, "HDFS");
+
     requestInfoProperties.put("/parameters/param1", "value1");
     requestInfoProperties.put("/parameters/param2", "value2");
-    requestInfoProperties.put(RequestResourceProvider.HOSTS_ID, "host1 ,host2, host3 ");
 
     String[] expectedHosts = new String[]{"host1", "host2", "host3"};
     Map<String, String> expectedParams = new HashMap<String, String>() {{
@@ -852,16 +883,19 @@ public class RequestResourceProviderTest {
 
     provider.createResources(request);
     Assert.assertTrue(actionRequest.hasCaptured());
-    Assert.assertTrue(actionRequest.getValue().isCommand());
-    Assert.assertEquals(null, actionRequest.getValue().getActionName());
-    Assert.assertEquals("HDFS_SERVICE_CHECK", actionRequest.getValue().getCommandName());
-    Assert.assertEquals("HDFS", actionRequest.getValue().getServiceName());
-    Assert.assertEquals(null, actionRequest.getValue().getComponentName());
-    Assert.assertEquals(3, actionRequest.getValue().getHosts().size());
-    Assert.assertArrayEquals(expectedHosts, actionRequest.getValue().getHosts().toArray());
-    Assert.assertEquals(2, actionRequest.getValue().getParameters().size());
+    ExecuteActionRequest capturedRequest = actionRequest.getValue();
+    Assert.assertTrue(capturedRequest.isCommand());
+    Assert.assertEquals(null, capturedRequest.getActionName());
+    Assert.assertEquals("HDFS_SERVICE_CHECK", capturedRequest.getCommandName());
+    Assert.assertEquals(1, capturedRequest.getResourceFilters().size());
+    RequestResourceFilter capturedResourceFilter = capturedRequest.getResourceFilters().get(0);
+    Assert.assertEquals("HDFS", capturedResourceFilter.getServiceName());
+    Assert.assertEquals(null, capturedResourceFilter.getComponentName());
+    Assert.assertEquals(3, capturedResourceFilter.getHostNames().size());
+    Assert.assertArrayEquals(expectedHosts, capturedResourceFilter.getHostNames().toArray());
+    Assert.assertEquals(2, capturedRequest.getParameters().size());
     for(String key : expectedParams.keySet()) {
-      Assert.assertEquals(expectedParams.get(key), actionRequest.getValue().getParameters().get(key));
+      Assert.assertEquals(expectedParams.get(key), capturedRequest.getParameters().get(key));
     }
   }
 }


[3/3] git commit: AMBARI-4791. API call to restart all components on one or more hosts should result in one request. (swagle)

Posted by sw...@apache.org.
AMBARI-4791. API call to restart all components on one or more hosts should result in one request. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c6987eee
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c6987eee
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c6987eee

Branch: refs/heads/trunk
Commit: c6987eeef05bce7404635a9a88a4f30b94c134e0
Parents: 0b15548
Author: Siddharth Wagle <sw...@hortonworks.com>
Authored: Mon Feb 24 19:39:57 2014 -0800
Committer: Siddharth Wagle <sw...@hortonworks.com>
Committed: Tue Feb 25 10:18:15 2014 -0800

----------------------------------------------------------------------
 .../ambari/server/actionmanager/Request.java    |  81 +--
 .../controller/ActionExecutionContext.java      |  87 ++--
 .../controller/AmbariActionExecutionHelper.java | 320 ++++++------
 .../AmbariCustomCommandExecutionHelper.java     | 494 ++++++++-----------
 .../AmbariManagementControllerImpl.java         | 280 +++++++++--
 .../ambari/server/controller/AmbariServer.java  |   1 +
 .../server/controller/ControllerModule.java     |   5 +-
 .../server/controller/ExecuteActionRequest.java |  48 +-
 .../controller/MaintenanceStateHelper.java      |  15 +-
 .../internal/RequestResourceFilter.java         |  69 +++
 .../internal/RequestResourceProvider.java       |  49 +-
 .../server/orm/entities/RequestEntity.java      |  49 +-
 .../entities/RequestResourceFilterEntity.java   |  94 ++++
 .../server/upgrade/UpgradeCatalog150.java       |  10 +
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   4 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   4 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   7 +-
 .../src/main/resources/META-INF/persistence.xml |   2 +-
 .../src/main/resources/properties.json          |   4 +-
 .../actionmanager/TestActionDBAccessorImpl.java |   8 +-
 .../AmbariManagementControllerTest.java         | 431 +++++++++++++---
 .../controller/MaintenanceStateHelperTest.java  |  26 +-
 .../internal/RequestResourceProviderTest.java   |  72 ++-
 23 files changed, 1411 insertions(+), 749 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
index d1047a7..22731ee 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/Request.java
@@ -23,7 +23,9 @@ import com.google.inject.assistedinject.Assisted;
 import com.google.inject.assistedinject.AssistedInject;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.ExecuteActionRequest;
+import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.orm.entities.RequestEntity;
+import org.apache.ambari.server.orm.entities.RequestResourceFilterEntity;
 import org.apache.ambari.server.orm.entities.StageEntity;
 import org.apache.ambari.server.state.Clusters;
 import org.slf4j.Logger;
@@ -47,9 +49,7 @@ public class Request {
   private long endTime;
   private HostRoleStatus status; // not persisted yet
   private String inputs;
-  private String targetService;
-  private String targetComponent;
-  private String targetHosts;
+  private List<RequestResourceFilter> resourceFilters;
   private RequestType requestType;
 
   private Collection<Stage> stages = new ArrayList<Stage>();
@@ -112,9 +112,7 @@ public class Request {
                  Clusters clusters, Gson gson) throws AmbariException {
     this(stages, clusters);
     if (actionRequest != null) {
-      this.targetService = actionRequest.getServiceName();
-      this.targetComponent = actionRequest.getComponentName();
-      this.targetHosts = gson.toJson(actionRequest.getHosts());
+      this.resourceFilters = actionRequest.getResourceFilters();
       this.inputs = gson.toJson(actionRequest.getParameters());
       this.requestType = actionRequest.isCommand() ? RequestType.COMMAND : RequestType.ACTION;
       this.commandName = actionRequest.isCommand() ? actionRequest.getCommandName() : actionRequest.getActionName();
@@ -138,13 +136,11 @@ public class Request {
     this.endTime = entity.getEndTime();
     this.requestContext = entity.getRequestContext();
     this.inputs = entity.getInputs();
-    this.targetService = entity.getTargetService();
-    this.targetComponent = entity.getTargetComponent();
-    this.targetHosts = entity.getTargetHosts();
+
     this.requestType = entity.getRequestType();
     this.commandName = entity.getCommandName();
     this.status = entity.getStatus();
-    if (entity.getRequestScheduleEntity() !=null) {
+    if (entity.getRequestScheduleEntity() != null) {
       this.requestScheduleId = entity.getRequestScheduleEntity().getScheduleId();
     }
 
@@ -152,6 +148,28 @@ public class Request {
       Stage stage = stageFactory.createExisting(stageEntity);
       stages.add(stage);
     }
+
+    // FIX(review): resourceFilters is only assigned on the ExecuteActionRequest
+    // constructor path; when rehydrating from a RequestEntity it is still null,
+    // so the add() below would throw NPE for any request persisted with filters.
+    // Initialize before populating. (Confirm no earlier init exists outside this hunk.)
+    this.resourceFilters = new ArrayList<RequestResourceFilter>();
+    for (RequestResourceFilterEntity resourceFilterEntity : entity.getResourceFilterEntities()) {
+      // Rebuild the API-level filter from its persisted form; hosts are stored
+      // as a comma-separated string and parsed back into a trimmed list.
+      RequestResourceFilter resourceFilter =
+        new RequestResourceFilter(
+            resourceFilterEntity.getServiceName(),
+            resourceFilterEntity.getComponentName(),
+            getHostsList(resourceFilterEntity.getHosts()));
+      this.resourceFilters.add(resourceFilter);
+    }
+
+  }
+
+  // Parses a comma-separated host string (as persisted on
+  // RequestResourceFilterEntity) into a list of trimmed, non-empty host names.
+  // Returns an empty list — never null — for null/empty input.
+  private List<String> getHostsList(String hosts) {
+    List<String> hostList = new ArrayList<String>();
+    if (hosts != null && !hosts.isEmpty()) {
+      for (String host : hosts.split(",")) {
+        // Skip blank segments produced by stray/trailing commas.
+        if (!host.trim().isEmpty()) {
+          hostList.add(host.trim());
+        }
+      }
+    }
+    return hostList;
+  }
 
   public Collection<Stage> getStages() {
@@ -176,13 +194,22 @@ public class Request {
     requestEntity.setEndTime(endTime);
     requestEntity.setRequestContext(requestContext);
     requestEntity.setInputs(inputs);
-    requestEntity.setTargetService(targetService);
-    requestEntity.setTargetComponent(targetComponent);
-    requestEntity.setTargetHosts(targetHosts);
     requestEntity.setRequestType(requestType);
     requestEntity.setRequestScheduleId(requestScheduleId);
     //TODO set all fields
 
+    if (resourceFilters != null) {
+      List<RequestResourceFilterEntity> filterEntities = new ArrayList<RequestResourceFilterEntity>();
+      for (RequestResourceFilter resourceFilter : resourceFilters) {
+        RequestResourceFilterEntity filterEntity = new RequestResourceFilterEntity();
+        filterEntity.setServiceName(resourceFilter.getServiceName());
+        filterEntity.setComponentName(resourceFilter.getComponentName());
+        // FIX(review): persist the host list too — the read path (constructor)
+        // parses getHosts() as a comma-separated string, but nothing was ever
+        // written, so filters always came back host-less.
+        // NOTE(review): assumes RequestResourceFilterEntity exposes setHosts(String)
+        // to match the getHosts() used on load — confirm against the entity class.
+        if (resourceFilter.getHostNames() != null && !resourceFilter.getHostNames().isEmpty()) {
+          StringBuilder hosts = new StringBuilder();
+          for (String host : resourceFilter.getHostNames()) {
+            if (hosts.length() > 0) {
+              hosts.append(',');
+            }
+            hosts.append(host);
+          }
+          filterEntity.setHosts(hosts.toString());
+        }
+        filterEntity.setRequestEntity(requestEntity);
+        filterEntity.setRequestId(requestId);
+        // FIX(review): the entity was built but never collected, so
+        // setResourceFilterEntities always received an empty list and no
+        // filter rows were ever persisted.
+        filterEntities.add(filterEntity);
+      }
+      requestEntity.setResourceFilterEntities(filterEntities);
+    }
+
     return requestEntity;
   }
 
@@ -231,28 +258,12 @@ public class Request {
     this.inputs = inputs;
   }
 
-  public String getTargetService() {
-    return targetService;
-  }
-
-  public void setTargetService(String targetService) {
-    this.targetService = targetService;
-  }
-
-  public String getTargetComponent() {
-    return targetComponent;
-  }
-
-  public void setTargetComponent(String targetComponent) {
-    this.targetComponent = targetComponent;
-  }
-
-  public String getTargetHosts() {
-    return targetHosts;
+  public List<RequestResourceFilter> getResourceFilters() {
+    return resourceFilters;
   }
 
-  public void setTargetHosts(String targetHosts) {
-    this.targetHosts = targetHosts;
+  public void setResourceFilters(List<RequestResourceFilter> resourceFilters) {
+    this.resourceFilters = resourceFilters;
   }
 
   public RequestType getRequestType() {
@@ -298,9 +309,7 @@ public class Request {
         ", startTime=" + startTime +
         ", endTime=" + endTime +
         ", inputs='" + inputs + '\'' +
-        ", targetService='" + targetService + '\'' +
-        ", targetComponent='" + targetComponent + '\'' +
-        ", targetHosts='" + targetHosts + '\'' +
+        ", resourceFilters='" + resourceFilters + '\'' +
         ", requestType=" + requestType +
         ", stages=" + stages +
         '}';

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
index b59eff1..37a404f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ActionExecutionContext.java
@@ -20,8 +20,8 @@
 package org.apache.ambari.server.controller;
 
 import org.apache.ambari.server.actionmanager.TargetHostType;
+import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 
-import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
@@ -31,59 +31,60 @@ import java.util.Map;
 public class ActionExecutionContext {
   private final String clusterName;
   private final String actionName;
-  private final String serviceName;
-  private final String componentName;
-  private final String componentCategory;
-  private final List<String> hosts;
-  private final Map<String, String> parameters;
-  private final TargetHostType targetType;
-  private final Short timeout;
+  private List<RequestResourceFilter> resourceFilters;
+  private Map<String, String> parameters;
+  private TargetHostType targetType;
+  private Short timeout;
+  private String expectedServiceName;
+  private String expectedComponentName;
 
   /**
    * Create an ActionExecutionContext to execute an action from a request
    */
-  public ActionExecutionContext(String clusterName, String actionName, String serviceName,
-                                String componentName, String componentCategory,
-                                List<String> hosts, Map<String, String> parameters,
-                                TargetHostType targetType, Short timeout) {
+  public ActionExecutionContext(String clusterName, String actionName,
+      List<RequestResourceFilter> resourceFilters,
+      Map<String, String> parameters, TargetHostType targetType,
+      Short timeout, String expectedServiceName,
+      String expectedComponentName) {
+
     this.clusterName = clusterName;
     this.actionName = actionName;
-    this.serviceName = serviceName;
-    this.componentName = componentName;
-    this.componentCategory = componentCategory;
+    this.resourceFilters = resourceFilters;
     this.parameters = parameters;
-    this.hosts = new ArrayList<String>();
-    if (hosts != null) {
-      this.hosts.addAll(hosts);
-    }
     this.targetType = targetType;
     this.timeout = timeout;
+    this.expectedServiceName = expectedServiceName;
+    this.expectedComponentName = expectedComponentName;
   }
 
-  public String getClusterName() {
-    return clusterName;
+  public ActionExecutionContext(String clusterName, String actionName,
+                                List<RequestResourceFilter> resourceFilters) {
+    this.clusterName = clusterName;
+    this.actionName = actionName;
+    this.resourceFilters = resourceFilters;
   }
 
-  public String getActionName() {
-    return actionName;
+  public ActionExecutionContext(String clusterName, String commandName,
+                                List<RequestResourceFilter> resourceFilters,
+                                Map<String, String> parameters) {
+    this.clusterName = clusterName;
+    this.actionName = commandName;
+    this.resourceFilters = resourceFilters;
+    this.parameters = parameters;
   }
 
-  public String getServiceName() {
-    return serviceName;
+  public String getClusterName() {
+    return clusterName;
   }
 
-  public String getComponentName() {
-    return componentName;
+  public String getActionName() {
+    return actionName;
   }
 
   public Map<String, String> getParameters() {
     return parameters;
   }
 
-  public List<String> getHosts() {
-    return hosts;
-  }
-
   public TargetHostType getTargetType() {
     return targetType;
   }
@@ -92,7 +93,27 @@ public class ActionExecutionContext {
     return timeout;
   }
 
-  public String getComponentCategory() {
-    return componentCategory;
+  public List<RequestResourceFilter> getResourceFilters() {
+    return resourceFilters;
+  }
+
+  public String getExpectedServiceName() {
+    return expectedServiceName;
+  }
+
+  public String getExpectedComponentName() {
+    return expectedComponentName;
+  }
+
+  @Override
+  public String toString() {
+    return "ActionExecutionContext{" +
+      "clusterName='" + clusterName + '\'' +
+      ", actionName='" + actionName + '\'' +
+      ", resourceFilters=" + resourceFilters +
+      ", parameters=" + parameters +
+      ", targetType=" + targetType +
+      ", timeout=" + timeout +
+      '}';
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
index 74aa1fd..85f9078 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariActionExecutionHelper.java
@@ -18,16 +18,8 @@
 
 package org.apache.ambari.server.controller;
 
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCHEMA_VERSION;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
-
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.Role;
 import org.apache.ambari.server.RoleCommand;
@@ -37,7 +29,7 @@ import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.actionmanager.TargetHostType;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
-import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.customactions.ActionDefinition;
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.state.Cluster;
@@ -51,45 +43,51 @@ import org.apache.ambari.server.utils.StageUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCHEMA_VERSION;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
 
 /**
  * Helper class containing logic to process custom action execution requests
  */
+@Singleton
 public class AmbariActionExecutionHelper {
   private final static Logger LOG =
       LoggerFactory.getLogger(AmbariActionExecutionHelper.class);
   private static final String TYPE_PYTHON = "PYTHON";
+
+  @Inject
   private ActionMetadata actionMetadata;
+  @Inject
   private Clusters clusters;
-  private AmbariManagementControllerImpl amcImpl;
+  @Inject
+  private AmbariManagementController managementController;
+  @Inject
   private ActionManager actionManager;
+  @Inject
   private AmbariMetaInfo ambariMetaInfo;
 
-  public AmbariActionExecutionHelper(ActionMetadata actionMetadata, Clusters clusters,
-                                     AmbariManagementControllerImpl amcImpl) {
-    this.amcImpl = amcImpl;
-    this.actionMetadata = actionMetadata;
-    this.clusters = clusters;
-    this.actionManager = amcImpl.getActionManager();
-    this.ambariMetaInfo = amcImpl.getAmbariMetaInfo();
-  }
-
-  // TODO: validate should not return context, should make it consistent with Command Execution helper
   /**
-   * Validates the request to execute an action
-   *
+   * Validates the request to execute an action.
    * @param actionRequest
-   * @param cluster
-   * @return
    * @throws AmbariException
    */
-  public ActionExecutionContext validateCustomAction(ExecuteActionRequest actionRequest, Cluster cluster)
-      throws AmbariException {
+  public void validateAction(ExecuteActionRequest actionRequest) throws AmbariException {
+    Cluster cluster = clusters.getCluster(actionRequest.getClusterName());
+
+    if (cluster == null) {
+      throw new AmbariException("Unable to find cluster. clusterName = " +
+        actionRequest.getClusterName());
+    }
+
     if (actionRequest.getActionName() == null || actionRequest.getActionName().isEmpty()) {
       throw new AmbariException("Action name must be specified");
     }
@@ -99,196 +97,214 @@ public class AmbariActionExecutionHelper {
       throw new AmbariException("Action " + actionRequest.getActionName() + " does not exist");
     }
 
+    if (actionDef.getInputs() != null) {
+      String[] inputs = actionDef.getInputs().split(",");
+      for (String input : inputs) {
+        String inputName = input.trim();
+        if (!inputName.isEmpty()) {
+          boolean mandatory = true;
+          if (inputName.startsWith("[") && inputName.endsWith("]")) {
+            mandatory = false;
+          }
+          if (mandatory && !actionRequest.getParameters().containsKey(inputName)) {
+            throw new AmbariException("Action " + actionRequest.getActionName() + " requires input '" +
+              input.trim() + "' that is not provided.");
+          }
+        }
+      }
+    }
+
+
     StackId stackId = cluster.getCurrentStackVersion();
     String expectedService = actionDef.getTargetService() == null ? "" : actionDef.getTargetService();
-    String actualService = actionRequest.getServiceName() == null ? "" : actionRequest.getServiceName();
-    if (!expectedService.isEmpty() && !actualService.isEmpty() && !expectedService.equals(actualService)) {
-      throw new AmbariException("Action " + actionRequest.getActionName() + " targets service " + actualService +
+
+    List<RequestResourceFilter> resourceFilters = actionRequest.getResourceFilters();
+    String targetService = "";
+    String targetComponent = "";
+    RequestResourceFilter resourceFilter = null;
+
+    if (resourceFilters != null && !resourceFilters.isEmpty()) {
+      if (resourceFilters.size() > 1) {
+        throw new AmbariException("Custom action definition only allows one " +
+          "resource filter to be specified.");
+      }
+
+      resourceFilter = resourceFilters.get(0);
+      String actualService = resourceFilter.getServiceName() == null ? "" : resourceFilter.getServiceName();
+      if (!expectedService.isEmpty() && !actualService.isEmpty() && !expectedService.equals(actualService)) {
+        throw new AmbariException("Action " + actionRequest.getActionName() + " targets service " + actualService +
           " that does not match with expected " + expectedService);
-    }
+      }
 
-    String targetService = expectedService;
-    if (targetService == null || targetService.isEmpty()) {
-      targetService = actualService;
-    }
+      targetService = expectedService;
+      if (targetService == null || targetService.isEmpty()) {
+        targetService = actualService;
+      }
 
-    if (targetService != null && !targetService.isEmpty()) {
-      ServiceInfo serviceInfo;
-      try {
-        serviceInfo = ambariMetaInfo.getService(stackId.getStackName(), stackId.getStackVersion(),
+      if (targetService != null && !targetService.isEmpty()) {
+        ServiceInfo serviceInfo;
+        try {
+          serviceInfo = ambariMetaInfo.getService(stackId.getStackName(), stackId.getStackVersion(),
             targetService);
-      } catch (StackAccessException se) {
-        serviceInfo = null;
-      }
+        } catch (StackAccessException se) {
+          serviceInfo = null;
+        }
 
-      if (serviceInfo == null) {
-        throw new AmbariException("Action " + actionRequest.getActionName() + " targets service " + targetService +
-            " that does not exist.");
+        if (serviceInfo == null) {
+          throw new AmbariException("Action " + actionRequest.getActionName() +
+            " targets service " + targetService + " that does not exist.");
+        }
       }
-    }
 
-    String expectedComponent = actionDef.getTargetComponent() == null ? "" : actionDef.getTargetComponent();
-    String actualComponent = actionRequest.getComponentName() == null ? "" : actionRequest.getComponentName();
-    if (!expectedComponent.isEmpty() && !actualComponent.isEmpty() && !expectedComponent.equals(actualComponent)) {
-      throw new AmbariException("Action " + actionRequest.getActionName() + " targets component " + actualComponent +
+      String expectedComponent = actionDef.getTargetComponent() == null ? "" : actionDef.getTargetComponent();
+      String actualComponent = resourceFilter.getComponentName() == null ? "" : resourceFilter.getComponentName();
+      if (!expectedComponent.isEmpty() && !actualComponent.isEmpty() && !expectedComponent.equals(actualComponent)) {
+        throw new AmbariException("Action " + actionRequest.getActionName() + " targets component " + actualComponent +
           " that does not match with expected " + expectedComponent);
-    }
+      }
 
-    String targetComponent = expectedComponent;
-    String componentCategory = "";
-    if (targetComponent == null || targetComponent.isEmpty()) {
-      targetComponent = actualComponent;
-    }
+      targetComponent = expectedComponent;
+      if (targetComponent == null || targetComponent.isEmpty()) {
+        targetComponent = actualComponent;
+      }
 
-    if (!targetComponent.isEmpty() && targetService.isEmpty()) {
-      throw new AmbariException("Action " + actionRequest.getActionName() + " targets component " + targetComponent +
+      if (!targetComponent.isEmpty() && targetService.isEmpty()) {
+        throw new AmbariException("Action " + actionRequest.getActionName() + " targets component " + targetComponent +
           " without specifying the target service.");
-    }
+      }
 
-    if (targetComponent != null && !targetComponent.isEmpty()) {
-      ComponentInfo compInfo;
-      try {
-        compInfo = ambariMetaInfo.getComponent(stackId.getStackName(), stackId.getStackVersion(),
+      if (targetComponent != null && !targetComponent.isEmpty()) {
+        ComponentInfo compInfo;
+        try {
+          compInfo = ambariMetaInfo.getComponent(stackId.getStackName(), stackId.getStackVersion(),
             targetService, targetComponent);
-      } catch (StackAccessException se) {
-        compInfo = null;
-      }
+        } catch (StackAccessException se) {
+          compInfo = null;
+        }
 
-      if (compInfo == null) {
-        throw new AmbariException("Action " + actionRequest.getActionName() + " targets component " + targetComponent +
+        if (compInfo == null) {
+          throw new AmbariException("Action " + actionRequest.getActionName() + " targets component " + targetComponent +
             " that does not exist.");
-      }
-      componentCategory = compInfo.getCategory();
-    }
-
-    if (actionDef.getInputs() != null) {
-      String[] inputs = actionDef.getInputs().split(",");
-      for (String input : inputs) {
-        String inputName = input.trim();
-        if (!inputName.isEmpty()) {
-          boolean mandatory = true;
-          if (inputName.startsWith("[") && inputName.endsWith("]")) {
-            mandatory = false;
-          }
-          if (mandatory && !actionRequest.getParameters().containsKey(inputName)) {
-            throw new AmbariException("Action " + actionRequest.getActionName() + " requires input '" +
-                input.trim() + "' that is not provided.");
-          }
         }
       }
     }
 
-    if (TargetHostType.SPECIFIC.name().equals(actionDef.getTargetType())
-        || (targetService.isEmpty() && targetService.isEmpty())) {
-      if (actionRequest.getHosts().size() == 0) {
+    if (TargetHostType.SPECIFIC.equals(actionDef.getTargetType())
+      || (targetService.isEmpty() && targetComponent.isEmpty())) {
+      if (resourceFilter == null || resourceFilter.getHostNames().size() == 0) {
         throw new AmbariException("Action " + actionRequest.getActionName() + " requires explicit target host(s)" +
-            " that is not provided.");
+          " that is not provided.");
       }
     }
-
-    LOG.info("Received action execution request"
-        + ", clusterName=" + actionRequest.getClusterName()
-        + ", request=" + actionRequest.toString());
-
-    ActionExecutionContext actionExecutionContext = new ActionExecutionContext(
-      actionRequest.getClusterName(), actionRequest.getActionName(),
-      targetService, targetComponent, componentCategory, actionRequest.getHosts(),
-      actionRequest.getParameters(), actionDef.getTargetType(), actionDef.getDefaultTimeout());
-
-    return actionExecutionContext;
   }
 
+
   /**
    * Add tasks to the stage based on the requested action execution
-   *
-   * @param actionContext   the context associated with the action
-   * @param stage           stage into which tasks must be inserted
-   * @param configuration
-   * @param hostsMap
-   * @param hostLevelParams
+   * @param actionContext the context associated with the action
+   * @param stage stage into which tasks must be inserted
+   * @param hostLevelParams host level params to send with the command
    * @throws AmbariException
    */
-  public void addAction(ActionExecutionContext actionContext, Stage stage,
-                        Configuration configuration, HostsMap hostsMap, Map<String, String> hostLevelParams)
+  public void addExecutionCommandsToStage(ActionExecutionContext
+    actionContext, Stage stage, Map<String, String> hostLevelParams)
       throws AmbariException {
+
     String actionName = actionContext.getActionName();
     String clusterName = actionContext.getClusterName();
-    String serviceName = actionContext.getServiceName();
-    String componentName = actionContext.getComponentName();
+    Cluster cluster = clusters.getCluster(clusterName);
+
+    List<RequestResourceFilter> resourceFilters = actionContext.getResourceFilters();
+
+    RequestResourceFilter resourceFilter = new RequestResourceFilter();
+    if (resourceFilters != null && !resourceFilters.isEmpty()) {
+      resourceFilter = resourceFilters.get(0);
+    }
 
     // List of host to select from
     Set<String> candidateHosts = new HashSet<String>();
-    if (!serviceName.isEmpty()) {
-      if (!componentName.isEmpty()) {
+
+    String serviceName = actionContext.getExpectedServiceName();
+    String componentName = actionContext.getExpectedComponentName();
+    StackId stackId = cluster.getCurrentStackVersion();
+    ComponentInfo componentInfo = null;
+
+    if (serviceName != null && !serviceName.isEmpty()) {
+      if (componentName != null && !componentName.isEmpty()) {
         Map<String, ServiceComponentHost> componentHosts =
-            clusters.getCluster(clusterName).getService(serviceName)
-                .getServiceComponent(componentName).getServiceComponentHosts();
+          cluster.getService(serviceName)
+            .getServiceComponent(componentName).getServiceComponentHosts();
         candidateHosts.addAll(componentHosts.keySet());
+        componentInfo = ambariMetaInfo.getComponentCategory(stackId.getStackName(),
+          stackId.getStackVersion(), serviceName, componentName);
       } else {
-        for (String component : clusters.getCluster(clusterName).getService(serviceName)
-            .getServiceComponents().keySet()) {
+        for (String component : cluster.getService(serviceName).getServiceComponents().keySet()) {
           Map<String, ServiceComponentHost> componentHosts =
-              clusters.getCluster(clusterName).getService(serviceName)
-                  .getServiceComponent(component).getServiceComponentHosts();
+            cluster.getService(serviceName)
+              .getServiceComponent(component).getServiceComponentHosts();
           candidateHosts.addAll(componentHosts.keySet());
         }
       }
     } else {
       // All hosts are valid target host
-      candidateHosts.addAll(amcImpl.getClusters().getHostsForCluster(clusterName).keySet());
+      candidateHosts.addAll(clusters.getHostsForCluster(cluster.getClusterName()).keySet());
     }
 
     // If request did not specify hosts and there exists no host
-    if (actionContext.getHosts().isEmpty() && candidateHosts.isEmpty()) {
+    if (resourceFilter.getHostNames().isEmpty() && candidateHosts.isEmpty()) {
       throw new AmbariException("Suitable hosts not found, component="
-          + componentName + ", service=" + serviceName
-          + ", cluster=" + clusterName + ", actionName=" + actionName);
+        + componentName + ", service=" + serviceName
+        + ", cluster=" + cluster.getClusterName() + ", " +
+        "actionName=" + actionContext.getActionName());
     }
 
     // Compare specified hosts to available hosts
-    if (!actionContext.getHosts().isEmpty() && !candidateHosts.isEmpty()) {
-      for (String hostname : actionContext.getHosts()) {
+    if (!resourceFilter.getHostNames().isEmpty() && !candidateHosts.isEmpty()) {
+      for (String hostname : resourceFilter.getHostNames()) {
         if (!candidateHosts.contains(hostname)) {
-          throw new AmbariException("Request specifies host " + hostname + " but its not a valid host based on the " +
-              "target service=" + serviceName + " and component=" + componentName);
+          throw new AmbariException("Request specifies host " + hostname +
+            " but its not a valid host based on the " +
+            "target service=" + serviceName + " and component=" + componentName);
         }
       }
     }
 
+    List<String> targetHosts = resourceFilter.getHostNames();
+
     //Find target hosts to execute
-    if (actionContext.getHosts().isEmpty()) {
+    if (targetHosts.isEmpty()) {
       TargetHostType hostType = actionContext.getTargetType();
       switch (hostType) {
         case ALL:
-          actionContext.getHosts().addAll(candidateHosts);
+          targetHosts.addAll(candidateHosts);
           break;
         case ANY:
-          actionContext.getHosts().add(amcImpl.getHealthyHost(candidateHosts));
+          targetHosts.add(managementController.getHealthyHost(candidateHosts));
           break;
         case MAJORITY:
           for (int i = 0; i < (candidateHosts.size() / 2) + 1; i++) {
-            String hostname = amcImpl.getHealthyHost(candidateHosts);
-            actionContext.getHosts().add(hostname);
+            String hostname = managementController.getHealthyHost(candidateHosts);
+            targetHosts.add(hostname);
             candidateHosts.remove(hostname);
           }
           break;
         default:
-          throw new AmbariException("Unsupported target type=" + hostType);
+          throw new AmbariException("Unsupported target type = " + hostType);
       }
     }
 
     //create tasks for each host
-    for (String hostName : actionContext.getHosts()) {
-      stage.addHostRoleExecutionCommand(hostName, Role.valueOf(actionContext.getActionName()), RoleCommand.ACTIONEXECUTE,
-          new ServiceComponentHostOpInProgressEvent(actionContext.getActionName(), hostName,
-              System.currentTimeMillis()), clusterName, actionContext.getServiceName());
-
-      Cluster cluster = clusters.getCluster(clusterName);
+    for (String hostName : targetHosts) {
+      stage.addHostRoleExecutionCommand(hostName,
+        Role.valueOf(actionContext.getActionName()), RoleCommand.ACTIONEXECUTE,
+          new ServiceComponentHostOpInProgressEvent(actionContext.getActionName(),
+            hostName, System.currentTimeMillis()), clusterName,
+              serviceName);
 
       Map<String, Map<String, String>> configurations = new TreeMap<String, Map<String, String>>();
       Map<String, Map<String, String>> configTags = null;
-      if (!actionContext.getServiceName().isEmpty()) {
-        configTags = amcImpl.findConfigurationTagsWithOverrides(cluster, hostName);
+      if (!serviceName.isEmpty()) {
+        configTags = managementController.findConfigurationTagsWithOverrides(cluster, hostName);
       }
 
       Map<String, String> commandParams = actionContext.getParameters();
@@ -298,7 +314,7 @@ public class AmbariActionExecutionHelper {
       commandParams.put(SCHEMA_VERSION, AmbariMetaInfo.SCHEMA_VERSION_2);
 
       ExecutionCommand execCmd = stage.getExecutionCommandWrapper(hostName,
-          actionContext.getActionName()).getExecutionCommand();
+        actionContext.getActionName()).getExecutionCommand();
 
       /*
        * TODO Execution command field population should be (partially?)
@@ -308,25 +324,27 @@ public class AmbariActionExecutionHelper {
       execCmd.setConfigurationTags(configTags);
       execCmd.setHostLevelParams(hostLevelParams);
       execCmd.setCommandParams(commandParams);
-      execCmd.setServiceName(serviceName);
-      execCmd.setComponentName(componentName);
+      execCmd.setServiceName(serviceName == null || serviceName.isEmpty() ?
+        resourceFilter.getServiceName() : serviceName);
+      execCmd.setComponentName(componentName == null || componentName.isEmpty() ?
+        resourceFilter.getComponentName() : componentName);
 
       Map<String, String> roleParams = execCmd.getRoleParams();
       if (roleParams == null) {
         roleParams = new TreeMap<String, String>();
       }
       roleParams.putAll(actionContext.getParameters());
-      roleParams.put(COMPONENT_CATEGORY, actionContext.getComponentCategory());
+      if (componentInfo != null) {
+        roleParams.put(COMPONENT_CATEGORY, componentInfo.getCategory());
+      }
       execCmd.setRoleParams(roleParams);
 
       // Generate cluster host info
       execCmd.setClusterHostInfo(
-          StageUtils.getClusterHostInfo(clusters.getHostsForCluster(clusterName), cluster));
-      
+        StageUtils.getClusterHostInfo(clusters.getHostsForCluster(clusterName), cluster));
+
       // cluster passive map
-      execCmd.setPassiveInfo(
-          MaintenanceStateHelper.getMaintenanceHostCompoments(clusters, cluster));
-          
+      execCmd.setPassiveInfo(MaintenanceStateHelper.getMaintenanceHostCompoments(clusters, cluster));
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index ba42cc5..abd82ac 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -29,6 +29,7 @@ import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
@@ -42,9 +43,7 @@ import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
-import org.apache.ambari.server.state.ServiceComponentHostEvent;
 import org.apache.ambari.server.state.ServiceInfo;
-import org.apache.ambari.server.state.ServiceOsSpecific;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
@@ -53,7 +52,6 @@ import org.apache.ambari.server.utils.StageUtils;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -62,29 +60,15 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
-
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMPONENT_CATEGORY;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CUSTOM_COMMAND;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_NAME;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_HOME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JCE_NAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_NAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.MYSQL_JDBC_URL;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.ORACLE_JDBC_URL;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.PACKAGE_LIST;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCHEMA_VERSION;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_REPO_INFO;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
-
 
 /**
  * Helper class containing logic to process custom command execution requests .
@@ -116,7 +100,7 @@ public class AmbariCustomCommandExecutionHelper {
   @Inject
   private Clusters clusters;
   @Inject
-  private AmbariManagementController amc;
+  private AmbariManagementController managementController;
   @Inject
   private Gson gson;
   @Inject
@@ -126,6 +110,10 @@ public class AmbariCustomCommandExecutionHelper {
   @Inject
   private ConfigHelper configHelper;
 
+  protected static final String SERVICE_CHECK_COMMAND_NAME = "SERVICE_CHECK";
+  protected static final String DECOMMISSION_COMMAND_NAME = "DECOMMISSION";
+
+
   private Boolean isServiceCheckCommand(String command, String service) {
     List<String> actions = actionMetadata.getActions(service);
     if (actions == null || actions.size() == 0) {
@@ -139,114 +127,98 @@ public class AmbariCustomCommandExecutionHelper {
     return true;
   }
 
-  private Boolean isValidCustomCommand(ExecuteActionRequest actionRequest) throws AmbariException {
-    String clustername = actionRequest.getClusterName();
-    Cluster cluster = clusters.getCluster(clustername);
+  private Boolean isValidCustomCommand(String clusterName,
+      String serviceName, String componentName, String commandName)
+      throws AmbariException {
+
+    Cluster cluster = clusters.getCluster(clusterName);
     StackId stackId = cluster.getDesiredStackVersion();
-    String serviceName = actionRequest.getServiceName();
-    String componentName = actionRequest.getComponentName();
-    String commandName = actionRequest.getCommandName();
 
     if (componentName == null) {
       return false;
     }
     ComponentInfo componentInfo = ambariMetaInfo.getComponent(
-        stackId.getStackName(), stackId.getStackVersion(),
-        serviceName, componentName);
+      stackId.getStackName(), stackId.getStackVersion(),
+      serviceName, componentName);
 
-    if (!componentInfo.isCustomCommand(commandName) &&
-        !actionMetadata.isDefaultHostComponentCommand(commandName)) {
-      return false;
-    }
-    return true;
+    return !(!componentInfo.isCustomCommand(commandName) &&
+      !actionMetadata.isDefaultHostComponentCommand(commandName));
   }
 
-  public void validateCustomCommand(ExecuteActionRequest actionRequest) throws AmbariException {
-    if (actionRequest.getServiceName() == null
-        || actionRequest.getServiceName().isEmpty()
-        || actionRequest.getCommandName() == null
-        || actionRequest.getCommandName().isEmpty()) {
-      throw new AmbariException("Invalid request : " + "cluster="
-          + actionRequest.getClusterName() + ", service="
-          + actionRequest.getServiceName() + ", command="
-          + actionRequest.getCommandName());
-    }
-
-    LOG.info("Received a command execution request"
-        + ", clusterName=" + actionRequest.getClusterName()
-        + ", serviceName=" + actionRequest.getServiceName()
-        + ", request=" + actionRequest.toString());
+  private Boolean isValidCustomCommand(ActionExecutionContext
+      actionExecutionContext, RequestResourceFilter resourceFilter)
+      throws AmbariException {
+    String clusterName = actionExecutionContext.getClusterName();
+    String serviceName = resourceFilter.getServiceName();
+    String componentName = resourceFilter.getComponentName();
+    String commandName = actionExecutionContext.getActionName();
 
-    if (!isServiceCheckCommand(actionRequest.getCommandName(), actionRequest.getServiceName())
-        && !isValidCustomCommand(actionRequest)) {
-      throw new AmbariException(
-          "Unsupported action " + actionRequest.getCommandName() + " for Service: " + actionRequest.getServiceName()
-              + " and Component: " + actionRequest.getComponentName());
+    if (componentName == null) {
+      return false;
     }
+
+    return isValidCustomCommand(clusterName, serviceName, componentName, commandName);
   }
 
-  /**
-   * Other than Service_Check and Decommission all other commands are pass-through
-   *
-   * @param actionRequest   received request to execute a command
-   * @param stage           the initial stage for task creation
-   * @param hostLevelParams specific parameters for the hosts
-   * @throws AmbariException
-   */
-  public void addAction(ExecuteActionRequest actionRequest, Stage stage,
-                        Map<String, String> hostLevelParams)
-      throws AmbariException {
-    if (actionRequest.getCommandName().contains("SERVICE_CHECK")) {
-      findHostAndAddServiceCheckAction(actionRequest, stage, hostLevelParams);
-    } else if (actionRequest.getCommandName().equals("DECOMMISSION")) {
-      addDecommissionAction(actionRequest, stage, hostLevelParams);
-    } else if (isValidCustomCommand(actionRequest)) {
-      String commandDetail = getReadableCustomCommandDetail(actionRequest);
-      addCustomCommandAction(actionRequest, stage, hostLevelParams, null, commandDetail);
-    } else {
-      throw new AmbariException("Unsupported action " + actionRequest.getCommandName());
+  private Boolean isValidCustomCommand(ExecuteActionRequest actionRequest,
+      RequestResourceFilter resourceFilter) throws AmbariException {
+    String clusterName = actionRequest.getClusterName();
+    String serviceName = resourceFilter.getServiceName();
+    String componentName = resourceFilter.getComponentName();
+    String commandName = actionRequest.getCommandName();
+
+    if (componentName == null) {
+      return false;
     }
+
+    return isValidCustomCommand(clusterName, serviceName, componentName, commandName);
   }
 
-  private String getReadableCustomCommandDetail(ExecuteActionRequest actionRequest) {
-    StringBuffer sb = new StringBuffer();
-    sb.append(actionRequest.getCommandName());
-    if (actionRequest.getServiceName() != null && !actionRequest.getServiceName().equals("")) {
-      sb.append(" " + actionRequest.getServiceName());
+  private String getReadableCustomCommandDetail(ActionExecutionContext
+        actionRequest, RequestResourceFilter resourceFilter) {
+    StringBuilder sb = new StringBuilder();
+    sb.append(actionRequest.getActionName());
+    if (resourceFilter.getServiceName() != null
+        && !resourceFilter.getServiceName().equals("")) {
+      sb.append(" ");
+      sb.append(resourceFilter.getServiceName());
     }
-    if (actionRequest.getComponentName() != null && !actionRequest.getComponentName().equals("")) {
-      sb.append("/" + actionRequest.getComponentName());
+    if (resourceFilter.getComponentName() != null
+        && !resourceFilter.getComponentName().equals("")) {
+      sb.append("/");
+      sb.append(resourceFilter.getComponentName());
     }
     return sb.toString();
   }
 
-  private void addCustomCommandAction(ExecuteActionRequest actionRequest,
+  private void addCustomCommandAction(ActionExecutionContext actionExecutionContext,
+                                      RequestResourceFilter resourceFilter,
                                       Stage stage, Map<String, String> hostLevelParams,
                                       Map<String, String> additionalCommandParams,
                                       String commandDetail)
-      throws AmbariException {
+                                      throws AmbariException {
 
-    if (actionRequest.getHosts().isEmpty()) {
+    List<String> hosts = resourceFilter.getHostNames();
+    if (hosts.isEmpty()) {
       throw new AmbariException("Invalid request : No hosts specified.");
     }
 
-    String serviceName = actionRequest.getServiceName();
-    String componentName = actionRequest.getComponentName();
-    String commandName = actionRequest.getCommandName();
+    String serviceName = resourceFilter.getServiceName();
+    String componentName = resourceFilter.getComponentName();
+    String commandName = actionExecutionContext.getActionName();
 
     String clusterName = stage.getClusterName();
     Cluster cluster = clusters.getCluster(clusterName);
     StackId stackId = cluster.getDesiredStackVersion();
-    AmbariMetaInfo ambariMetaInfo = amc.getAmbariMetaInfo();
-    ServiceInfo serviceInfo =
-        ambariMetaInfo.getServiceInfo(stackId.getStackName(),
-            stackId.getStackVersion(), serviceName);
-    StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
-        stackId.getStackVersion());
+    AmbariMetaInfo ambariMetaInfo = managementController.getAmbariMetaInfo();
+    ServiceInfo serviceInfo = ambariMetaInfo.getServiceInfo
+      (stackId.getStackName(), stackId.getStackVersion(), serviceName);
+    StackInfo stackInfo = ambariMetaInfo.getStackInfo
+      (stackId.getStackName(), stackId.getStackVersion());
 
     long nowTimestamp = System.currentTimeMillis();
 
-    for (String hostName : actionRequest.getHosts()) {
+    for (String hostName : hosts) {
 
       Host host = clusters.getHost(hostName);
 
@@ -258,12 +230,12 @@ public class AmbariCustomCommandExecutionHelper {
       Map<String, Map<String, String>> configurations =
           new TreeMap<String, Map<String, String>>();
       Map<String, Map<String, String>> configTags =
-          amc.findConfigurationTagsWithOverrides(cluster, hostName);
+          managementController.findConfigurationTagsWithOverrides(cluster, hostName);
 
       HostRoleCommand cmd = stage.getHostRoleCommand(hostName, componentName);
       if (cmd != null) {
         cmd.setCommandDetail(commandDetail);
-        cmd.setCustomCommandName(actionRequest.getCommandName());
+        cmd.setCustomCommandName(commandName);
       }
 
       ExecutionCommand execCmd = stage.getExecutionCommandWrapper(hostName,
@@ -277,7 +249,8 @@ public class AmbariCustomCommandExecutionHelper {
 
       hostLevelParams.put(CUSTOM_COMMAND, commandName);
       // Set parameters required for re-installing clients on restart
-      hostLevelParams.put(REPO_INFO, getRepoInfo(cluster, host));
+      hostLevelParams.put(REPO_INFO, getRepoInfo
+        (cluster, host));
       execCmd.setHostLevelParams(hostLevelParams);
 
       Map<String, String> commandParams = new TreeMap<String, String>();
@@ -329,37 +302,50 @@ public class AmbariCustomCommandExecutionHelper {
     }
   }
 
-  private void findHostAndAddServiceCheckAction(ExecuteActionRequest actionRequest, Stage stage,
-                                                Map<String, String> hostLevelParams)
+  private void findHostAndAddServiceCheckAction(ActionExecutionContext
+      actionExecutionContext, RequestResourceFilter resourceFilter,
+      Stage stage, Map<String, String> hostLevelParams)
       throws AmbariException {
-    String clusterName = actionRequest.getClusterName();
-    String componentName = actionMetadata.getClient(actionRequest
-        .getServiceName());
-    String serviceName = actionRequest.getServiceName();
-    String smokeTestRole = actionRequest.getCommandName();
+
+    String clusterName = actionExecutionContext.getClusterName();
+    String componentName = actionMetadata.getClient(resourceFilter.getServiceName());
+    String serviceName = resourceFilter.getServiceName();
+    String smokeTestRole = actionExecutionContext.getActionName();
     long nowTimestamp = System.currentTimeMillis();
-    Map<String, String> actionParameters = actionRequest.getParameters();
+    Map<String, String> actionParameters = actionExecutionContext.getParameters();
 
     String hostName;
     if (componentName != null) {
-      Map<String, ServiceComponentHost> components = clusters
-          .getCluster(clusterName).getService(actionRequest.getServiceName())
+      Map<String, ServiceComponentHost> components =
+        clusters.getCluster(clusterName).getService(serviceName)
           .getServiceComponent(componentName).getServiceComponentHosts();
 
       if (components.isEmpty()) {
         throw new AmbariException("Hosts not found, component="
-            + componentName + ", service=" + actionRequest.getServiceName()
-            + ", cluster=" + clusterName);
+            + componentName + ", service = " + serviceName
+            + ", cluster = " + clusterName);
+      }
+
+      List<String> candidateHosts = resourceFilter.getHostNames();
+      if (candidateHosts != null && !candidateHosts.isEmpty()) {
+        hostName = managementController.getHealthyHost
+          (new HashSet<String>(candidateHosts));
+
+        if (hostName == null) {
+          LOG.info("Unable to find a healthy host amongst the provided set of " +
+            "hosts. " + candidateHosts);
+        }
+      } else {
+        hostName = managementController.getHealthyHost(components.keySet());
       }
-      hostName = amc.getHealthyHost(components.keySet());
+
     } else {
       Map<String, ServiceComponent> components = clusters
-          .getCluster(clusterName).getService(actionRequest.getServiceName())
-          .getServiceComponents();
+        .getCluster(clusterName).getService(serviceName).getServiceComponents();
 
       if (components.isEmpty()) {
-        throw new AmbariException("Components not found, service="
-            + actionRequest.getServiceName() + ", cluster=" + clusterName);
+        throw new AmbariException("Components not found, service = "
+            + serviceName + ", cluster = " + clusterName);
       }
 
       ServiceComponent serviceComponent = components.values().iterator()
@@ -367,12 +353,11 @@ public class AmbariCustomCommandExecutionHelper {
 
       if (serviceComponent.getServiceComponentHosts().isEmpty()) {
         throw new AmbariException("Hosts not found, component="
-            + serviceComponent.getName() + ", service="
-            + actionRequest.getServiceName() + ", cluster=" + clusterName);
+            + serviceComponent.getName() + ", service = "
+            + serviceName + ", cluster = " + clusterName);
       }
 
-      hostName = serviceComponent.getServiceComponentHosts().keySet()
-          .iterator().next();
+      hostName = serviceComponent.getServiceComponentHosts().keySet().iterator().next();
     }
 
     addServiceCheckAction(stage, hostName, smokeTestRole, nowTimestamp,
@@ -392,17 +377,17 @@ public class AmbariCustomCommandExecutionHelper {
                                     String componentName,
                                     Map<String, String> actionParameters,
                                     Map<String, String> hostLevelParams)
-      throws AmbariException {
+                                    throws AmbariException {
 
     String clusterName = stage.getClusterName();
     Cluster cluster = clusters.getCluster(clusterName);
     StackId stackId = cluster.getDesiredStackVersion();
-    AmbariMetaInfo ambariMetaInfo = amc.getAmbariMetaInfo();
+    AmbariMetaInfo ambariMetaInfo = managementController.getAmbariMetaInfo();
     ServiceInfo serviceInfo =
         ambariMetaInfo.getServiceInfo(stackId.getStackName(),
             stackId.getStackVersion(), serviceName);
     StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
-        stackId.getStackVersion());
+      stackId.getStackVersion());
 
 
     stage.addHostRoleExecutionCommand(hostname,
@@ -419,7 +404,7 @@ public class AmbariCustomCommandExecutionHelper {
     Map<String, Map<String, String>> configurations =
         new TreeMap<String, Map<String, String>>();
     Map<String, Map<String, String>> configTags =
-        amc.findConfigurationTagsWithOverrides(cluster, hostname);
+        managementController.findConfigurationTagsWithOverrides(cluster, hostname);
 
     ExecutionCommand execCmd = stage.getExecutionCommandWrapper(hostname,
         smokeTestRole).getExecutionCommand();
@@ -489,46 +474,53 @@ public class AmbariCustomCommandExecutionHelper {
    * Processes decommission command. Modifies the host components as needed and then
    * calls into the implementation of a custom command
    */
-  private void addDecommissionAction(ExecuteActionRequest request, Stage stage,
-                                     Map<String, String> hostLevelParams)
-      throws AmbariException {
+  private void addDecommissionAction(ActionExecutionContext actionExecutionContext,
+                                     RequestResourceFilter resourceFilter,
+                                     Stage stage, Map<String, String> hostLevelParams)
+                                     throws AmbariException {
 
-    String clusterName = request.getClusterName();
+    String clusterName = actionExecutionContext.getClusterName();
     Cluster cluster = clusters.getCluster(clusterName);
-    String serviceName = request.getServiceName();
+    String serviceName = resourceFilter.getServiceName();
+    String componentName = resourceFilter.getComponentName();
+    List<String> hosts = resourceFilter.getHostNames();
 
-    if (request.getHosts() != null && request.getHosts().size() != 0) {
-      throw new AmbariException("Decommission command cannot be issued with target host(s) specified.");
+    if (hosts != null && !hosts.isEmpty()) {
+      throw new AmbariException("Decommission command cannot be issued with " +
+        "target host(s) specified.");
     }
 
     //Get all hosts to be added and removed
-    Set<String> excludedHosts = getHostList(request.getParameters(), DECOM_EXCLUDED_HOSTS);
-    Set<String> includedHosts = getHostList(request.getParameters(), DECOM_INCLUDED_HOSTS);
-    String slaveCompType = request.getParameters().get(DECOM_SLAVE_COMPONENT);
+    Set<String> excludedHosts = getHostList(actionExecutionContext.getParameters(),
+                                            DECOM_EXCLUDED_HOSTS);
+    Set<String> includedHosts = getHostList(actionExecutionContext.getParameters(),
+                                            DECOM_INCLUDED_HOSTS);
+    String slaveCompType = actionExecutionContext.getParameters().get(DECOM_SLAVE_COMPONENT);
 
     Set<String> cloneSet = new HashSet<String>(excludedHosts);
     cloneSet.retainAll(includedHosts);
     if (cloneSet.size() > 0) {
-      throw new AmbariException("Same host cannot be specified for inclusion as well as exclusion. Hosts: "
-          + cloneSet.toString());
+      throw new AmbariException("Same host cannot be specified for inclusion " +
+        "as well as exclusion. Hosts: " + cloneSet.toString());
     }
 
     Service service = cluster.getService(serviceName);
     if (service == null) {
-      throw new AmbariException("Specified service " + serviceName + " is not a valid/deployed service.");
+      throw new AmbariException("Specified service " + serviceName +
+        " is not a valid/deployed service.");
     }
 
-    String masterCompType = request.getComponentName();
+    String masterCompType = componentName;
     Map<String, ServiceComponent> svcComponents = service.getServiceComponents();
     if (!svcComponents.containsKey(masterCompType)) {
-      throw new AmbariException("Specified component " + masterCompType + " does not belong to service "
-          + serviceName + ".");
+      throw new AmbariException("Specified component " + masterCompType +
+        " does not belong to service " + serviceName + ".");
     }
 
     ServiceComponent masterComponent = svcComponents.get(masterCompType);
     if (!masterComponent.isMasterComponent()) {
-      throw new AmbariException("Specified component " + masterCompType + " is not a MASTER for service "
-          + serviceName + ".");
+      throw new AmbariException("Specified component " + masterCompType +
+        " is not a MASTER for service " + serviceName + ".");
     }
 
     if (!masterToSlaveMappingForDecom.containsKey(masterCompType)) {
@@ -542,7 +534,7 @@ public class AmbariCustomCommandExecutionHelper {
       throw new AmbariException("Component " + slaveCompType + " is not supported for decommissioning.");
     }
 
-    String isDrainOnlyRequest = request.getParameters().get(HBASE_MARK_DRAINING_ONLY);
+    String isDrainOnlyRequest = actionExecutionContext.getParameters().get(HBASE_MARK_DRAINING_ONLY);
     if (isDrainOnlyRequest != null && !slaveCompType.equals(Role.HBASE_REGIONSERVER.name())) {
       throw new AmbariException(HBASE_MARK_DRAINING_ONLY + " is not a valid parameter for " + masterCompType);
     }
@@ -557,7 +549,7 @@ public class AmbariCustomCommandExecutionHelper {
       }
     }
 
-    String alignMtnStateStr = request.getParameters().get(ALIGN_MAINTENANCE_STATE);
+    String alignMtnStateStr = actionExecutionContext.getParameters().get(ALIGN_MAINTENANCE_STATE);
     boolean alignMtnState = "true".equals(alignMtnStateStr);
     // Set/reset decommissioned flag on all components
     List<String> listOfExcludedHosts = new ArrayList<String>();
@@ -597,12 +589,18 @@ public class AmbariCustomCommandExecutionHelper {
       }
     }
 
-    StringBuilder commandDetail = getReadableDecommissionCommandDetail(request, includedHosts, listOfExcludedHosts);
+    StringBuilder commandDetail = getReadableDecommissionCommandDetail
+      (actionExecutionContext, includedHosts, listOfExcludedHosts);
 
     for (String hostName : masterSchs.keySet()) {
-      ExecuteActionRequest commandRequest = new ExecuteActionRequest(
-          request.getClusterName(), request.getCommandName(), request.getActionName(), request.getServiceName(),
-          masterComponent.getName(), Collections.singletonList(hostName), null);
+      RequestResourceFilter commandFilter = new RequestResourceFilter(serviceName,
+        masterComponent.getName(), Collections.singletonList(hostName));
+      List<RequestResourceFilter> resourceFilters = new ArrayList<RequestResourceFilter>();
+      resourceFilters.add(commandFilter);
+
+      ActionExecutionContext commandContext = new ActionExecutionContext(
+        clusterName, actionExecutionContext.getActionName(), resourceFilters
+      );
 
       String clusterHostInfoJson = StageUtils.getGson().toJson(
           StageUtils.getClusterHostInfo(clusters.getHostsForCluster(cluster.getClusterName()), cluster));
@@ -622,16 +620,17 @@ public class AmbariCustomCommandExecutionHelper {
 
       if (!serviceName.equals(Service.Type.HBASE.name()) || hostName.equals(primaryCandidate)) {
         commandParams.put(UPDATE_EXCLUDE_FILE_ONLY, "false");
-        addCustomCommandAction(commandRequest, stage, hostLevelParams, commandParams, commandDetail.toString());
+        addCustomCommandAction(commandContext, commandFilter, stage,
+          hostLevelParams, commandParams, commandDetail.toString());
       }
     }
   }
 
-  private StringBuilder getReadableDecommissionCommandDetail(ExecuteActionRequest request,
-                                                             Set<String> includedHosts,
-                                                             List<String> listOfExcludedHosts) {
+  private StringBuilder getReadableDecommissionCommandDetail(
+      ActionExecutionContext actionExecutionContext, Set<String> includedHosts,
+      List<String> listOfExcludedHosts) {
     StringBuilder commandDetail = new StringBuilder();
-    commandDetail.append(request.getCommandName());
+    commandDetail.append(actionExecutionContext.getActionName());
     if (listOfExcludedHosts.size() > 0) {
       commandDetail.append(", Excluded: ").append(StringUtils.join(listOfExcludedHosts, ','));
     }
@@ -642,156 +641,83 @@ public class AmbariCustomCommandExecutionHelper {
   }
 
   /**
-   * Creates and populates an EXECUTION_COMMAND for host
+   * Validate custom command and throw exception is invalid request.
+   * @param actionRequest
+   * @throws AmbariException
    */
-  public void createHostAction(Cluster cluster,
-                               Stage stage, ServiceComponentHost scHost,
-                               Map<String, Map<String, String>> configurations,
-                               Map<String, Map<String, String>> configTags,
-                               RoleCommand roleCommand,
-                               Map<String, String> commandParams,
-                               ServiceComponentHostEvent event)
-      throws AmbariException {
-
-    stage.addHostRoleExecutionCommand(scHost.getHostName(), Role.valueOf(scHost
-        .getServiceComponentName()), roleCommand,
-        event, scHost.getClusterName(),
-        scHost.getServiceName());
-    String serviceName = scHost.getServiceName();
-    String componentName = event.getServiceComponentName();
-    String hostname = scHost.getHostName();
-    String osType = clusters.getHost(hostname).getOsType();
-    StackId stackId = cluster.getDesiredStackVersion();
-    ServiceInfo serviceInfo = ambariMetaInfo.getServiceInfo(stackId.getStackName(),
-        stackId.getStackVersion(), serviceName);
-    ComponentInfo componentInfo = ambariMetaInfo.getComponent(
-        stackId.getStackName(), stackId.getStackVersion(),
-        serviceName, componentName);
-    StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
-        stackId.getStackVersion());
-
-    ExecutionCommand execCmd = stage.getExecutionCommandWrapper(scHost.getHostName(),
-        scHost.getServiceComponentName()).getExecutionCommand();
+  public void validateAction(ExecuteActionRequest actionRequest) throws AmbariException {
 
-    Host host = clusters.getHost(scHost.getHostName());
+    List<RequestResourceFilter> resourceFilters = actionRequest.getResourceFilters();
 
-    // Hack - Remove passwords from configs
-    if (event.getServiceComponentName().equals(Role.HIVE_CLIENT.toString())) {
-      configHelper.applyCustomConfig(configurations, Configuration.HIVE_CONFIG_TAG,
-          Configuration.HIVE_METASTORE_PASSWORD_PROPERTY, "", true);
+    if (resourceFilters == null || resourceFilters.isEmpty()) {
+      throw new AmbariException("Command execution cannot proceed without a " +
+        "resource filter.");
     }
 
-    String jobtrackerHost = amc.getJobTrackerHost(cluster);
-    if (!scHost.getHostName().equals(jobtrackerHost)) {
-      if (configTags.get(Configuration.GLOBAL_CONFIG_TAG) != null) {
-        configHelper.applyCustomConfig(
-            configurations, Configuration.GLOBAL_CONFIG_TAG,
-            Configuration.RCA_ENABLED_PROPERTY, "false", false);
+    for (RequestResourceFilter resourceFilter : resourceFilters) {
+      if (resourceFilter.getServiceName() == null
+        || resourceFilter.getServiceName().isEmpty()
+        || actionRequest.getCommandName() == null
+        || actionRequest.getCommandName().isEmpty()) {
+        throw new AmbariException("Invalid resource filter : " + "cluster = "
+          + actionRequest.getClusterName() + ", service = "
+          + resourceFilter.getServiceName() + ", command = "
+          + actionRequest.getCommandName());
       }
-    }
 
-    execCmd.setConfigurations(configurations);
-    execCmd.setConfigurationTags(configTags);
-    if (commandParams == null) { // if not defined
-      commandParams = new TreeMap<String, String>();
-    }
-    commandParams.put(SCHEMA_VERSION, serviceInfo.getSchemaVersion());
-
-
-    // Get command script info for custom command/custom action
-    /*
-     * TODO: Custom actions are not supported yet, that's why we just pass
-     * component main commandScript to agent. This script is only used for
-     * default commads like INSTALL/STOP/START/CONFIGURE
-     */
-    String commandTimeout = configs.getDefaultAgentTaskTimeout();
-    CommandScriptDefinition script = componentInfo.getCommandScript();
-    if (serviceInfo.getSchemaVersion().equals(AmbariMetaInfo.SCHEMA_VERSION_2)) {
-      if (script != null) {
-        commandParams.put(SCRIPT, script.getScript());
-        commandParams.put(SCRIPT_TYPE, script.getScriptType().toString());
-        if (script.getTimeout() > 0) {
-          commandTimeout = String.valueOf(script.getTimeout());
-        }
-      } else {
-        String message = String.format("Component %s of service %s has no " +
-            "command script defined", componentName, serviceName);
-        throw new AmbariException(message);
+      if (!isServiceCheckCommand(actionRequest.getCommandName(), resourceFilter.getServiceName())
+        && !isValidCustomCommand(actionRequest, resourceFilter)) {
+        throw new AmbariException(
+          "Unsupported action " + actionRequest.getCommandName() +
+            " for Service: " + resourceFilter.getServiceName()
+            + " and Component: " + resourceFilter.getComponentName());
       }
     }
-    commandParams.put(COMMAND_TIMEOUT, commandTimeout);
-    commandParams.put(SERVICE_PACKAGE_FOLDER,
-        serviceInfo.getServicePackageFolder());
-    commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
-
-    execCmd.setCommandParams(commandParams);
+  }
 
-    String repoInfo = getRepoInfo(cluster, host);
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Sending repo information to agent"
-          + ", hostname=" + scHost.getHostName()
-          + ", clusterName=" + cluster.getClusterName()
-          + ", stackInfo=" + stackId.getStackId()
-          + ", repoInfo=" + repoInfo);
-    }
-
-    Map<String, String> hostParams = new TreeMap<String, String>();
-    hostParams.put(REPO_INFO, repoInfo);
-    hostParams.put(JDK_LOCATION, amc.getJdkResourceUrl());
-    hostParams.put(JAVA_HOME, amc.getJavaHome());
-    hostParams.put(JDK_NAME, amc.getJDKName());
-    hostParams.put(JCE_NAME, amc.getJCEName());
-    hostParams.put(STACK_NAME, stackId.getStackName());
-    hostParams.put(STACK_VERSION, stackId.getStackVersion());
-    hostParams.put(DB_NAME, amc.getServerDB());
-    hostParams.put(MYSQL_JDBC_URL, amc.getMysqljdbcUrl());
-    hostParams.put(ORACLE_JDBC_URL, amc.getOjdbcUrl());
-    hostParams.putAll(amc.getRcaParameters());
-
-    // Write down os specific info for the service
-    ServiceOsSpecific anyOs = null;
-    if (serviceInfo.getOsSpecifics().containsKey(AmbariMetaInfo.ANY_OS)) {
-      anyOs = serviceInfo.getOsSpecifics().get(AmbariMetaInfo.ANY_OS);
-    }
-    ServiceOsSpecific hostOs = null;
-    if (serviceInfo.getOsSpecifics().containsKey(osType)) {
-      hostOs = serviceInfo.getOsSpecifics().get(osType);
-      // Choose repo that is relevant for host
-      ServiceOsSpecific.Repo serviceRepo = hostOs.getRepo();
-      if (serviceRepo != null) {
-        String serviceRepoInfo = gson.toJson(serviceRepo);
-        hostParams.put(SERVICE_REPO_INFO, serviceRepoInfo);
+  /**
+   * Other than Service_Check and Decommission all other commands are pass-through
+   * @param actionExecutionContext received request to execute a command
+   * @param stage the initial stage for task creation
+   * @param hostLevelParams specific parameters for the hosts
+   * @throws AmbariException
+   */
+  public void addExecutionCommandsToStage(ActionExecutionContext actionExecutionContext,
+                                          Stage stage, Map<String, String> hostLevelParams)
+                                          throws AmbariException {
+
+    List<RequestResourceFilter> resourceFilters = actionExecutionContext.getResourceFilters();
+
+    for (RequestResourceFilter resourceFilter : resourceFilters) {
+      LOG.debug("Received a command execution request"
+        + ", clusterName=" + actionExecutionContext.getClusterName()
+        + ", serviceName=" + resourceFilter.getServiceName()
+        + ", request=" + actionExecutionContext.toString());
+
+      if (actionExecutionContext.getActionName().contains(SERVICE_CHECK_COMMAND_NAME)) {
+        findHostAndAddServiceCheckAction(actionExecutionContext,
+          resourceFilter, stage, hostLevelParams);
+      } else if (actionExecutionContext.getActionName().equals(DECOMMISSION_COMMAND_NAME)) {
+        addDecommissionAction(actionExecutionContext, resourceFilter, stage, hostLevelParams);
+      } else if (isValidCustomCommand(actionExecutionContext, resourceFilter)) {
+        String commandDetail = getReadableCustomCommandDetail(actionExecutionContext, resourceFilter);
+        addCustomCommandAction(actionExecutionContext, resourceFilter, stage,
+          hostLevelParams, null, commandDetail);
+      } else {
+        throw new AmbariException("Unsupported action " +
+          actionExecutionContext.getActionName());
       }
     }
-    // Build package list that is relevant for host
-    List<ServiceOsSpecific.Package> packages =
-        new ArrayList<ServiceOsSpecific.Package>();
-    if (anyOs != null) {
-      packages.addAll(anyOs.getPackages());
-    }
-
-    if (hostOs != null) {
-      packages.addAll(hostOs.getPackages());
-    }
-    String packageList = gson.toJson(packages);
-    hostParams.put(PACKAGE_LIST, packageList);
-
-    if (configs.getServerDBName().equalsIgnoreCase(Configuration
-        .ORACLE_DB_NAME)) {
-      hostParams.put(DB_DRIVER_FILENAME, configs.getOjdbcJarName());
-    } else if (configs.getServerDBName().equalsIgnoreCase(Configuration
-        .MYSQL_DB_NAME)) {
-      hostParams.put(DB_DRIVER_FILENAME, configs.getMySQLJarName());
-    }
-    execCmd.setHostLevelParams(hostParams);
-
-    Map<String, String> roleParams = new TreeMap<String, String>();
-    execCmd.setRoleParams(roleParams);
-    
-    execCmd.setPassiveInfo(MaintenanceStateHelper.getMaintenanceHostCompoments(clusters, cluster));
   }
 
-  private String getRepoInfo(Cluster cluster, Host host) throws AmbariException {
+  /**
+   * Get repository info given a cluster and host.
+   * @param cluster
+   * @param host
+   * @return
+   * @throws AmbariException
+   */
+  public String getRepoInfo(Cluster cluster, Host host) throws AmbariException {
     StackId stackId = cluster.getDesiredStackVersion();
 
     Map<String, List<RepositoryInfo>> repos = ambariMetaInfo.getRepository(


[2/3] AMBARI-4791. API call to restart all components on one or more hosts should result in one request. (swagle)

Posted by sw...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 7e7f381..60688af 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -18,22 +18,11 @@
 
 package org.apache.ambari.server.controller;
 
-import java.io.File;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.text.MessageFormat;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.TreeMap;
-
+import com.google.gson.Gson;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Singleton;
+import com.google.inject.persist.Transactional;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.ClusterNotFoundException;
 import org.apache.ambari.server.DuplicateResourceException;
@@ -52,9 +41,11 @@ import org.apache.ambari.server.actionmanager.Request;
 import org.apache.ambari.server.actionmanager.RequestFactory;
 import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.actionmanager.StageFactory;
+import org.apache.ambari.server.agent.ExecutionCommand;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.internal.URLStreamProvider;
+import org.apache.ambari.server.customactions.ActionDefinition;
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.metadata.RoleCommandOrder;
 import org.apache.ambari.server.scheduler.ExecutionScheduleManager;
@@ -64,14 +55,15 @@ import org.apache.ambari.server.security.authorization.Users;
 import org.apache.ambari.server.stageplanner.RoleGraph;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.CommandScriptDefinition;
 import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigFactory;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.Host;
 import org.apache.ambari.server.state.HostState;
-import org.apache.ambari.server.state.OperatingSystemInfo;
 import org.apache.ambari.server.state.MaintenanceState;
+import org.apache.ambari.server.state.OperatingSystemInfo;
 import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.Service;
@@ -82,14 +74,15 @@ import org.apache.ambari.server.state.ServiceComponentHostEvent;
 import org.apache.ambari.server.state.ServiceComponentHostFactory;
 import org.apache.ambari.server.state.ServiceFactory;
 import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.ServiceOsSpecific;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.state.State;
 import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
 import org.apache.ambari.server.state.scheduler.RequestExecutionFactory;
-import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostDisableEvent;
+import org.apache.ambari.server.state.svccomphost.ServiceComponentHostInstallEvent;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostRestoreEvent;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStartEvent;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostStopEvent;
@@ -101,17 +94,48 @@ import org.apache.http.client.utils.URIBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.gson.Gson;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Singleton;
-import com.google.inject.persist.Transactional;
+import java.io.File;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.TreeMap;
 
-import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.*;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_DRIVER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_PASSWORD;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_URL;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.AMBARI_DB_RCA_USERNAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_TIMEOUT;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JAVA_HOME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JCE_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_LOCATION;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.JDK_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.MYSQL_JDBC_URL;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.ORACLE_JDBC_URL;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.PACKAGE_LIST;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.REPO_INFO;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCHEMA_VERSION;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TYPE;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_REPO_INFO;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
 
 @Singleton
-public class AmbariManagementControllerImpl implements
-    AmbariManagementController {
+public class AmbariManagementControllerImpl implements AmbariManagementController {
 
   private final static Logger LOG =
       LoggerFactory.getLogger(AmbariManagementControllerImpl.class);
@@ -185,7 +209,8 @@ public class AmbariManagementControllerImpl implements
 
   @Inject
   private AmbariCustomCommandExecutionHelper customCommandExecutionHelper;
-  final private AmbariActionExecutionHelper actionExecutionHelper;
+  @Inject
+  private AmbariActionExecutionHelper actionExecutionHelper;
 
   @Inject
   public AmbariManagementControllerImpl(ActionManager actionManager,
@@ -227,9 +252,6 @@ public class AmbariManagementControllerImpl implements
       this.mysqljdbcUrl = null;
       this.serverDB = null;
     }
-
-    this.actionExecutionHelper = new AmbariActionExecutionHelper(
-        this.actionMetadata, this.clusters, this);
   }
   
   public String getAmbariServerURI(String path) {
@@ -1100,6 +1122,180 @@ public class AmbariManagementControllerImpl implements
     return executionScheduleManager;
   }
 
+  /**
+   * Creates and populates an EXECUTION_COMMAND for host
+   */
+  private void createHostAction(Cluster cluster,
+                                Stage stage, ServiceComponentHost scHost,
+                                Map<String, Map<String, String>> configurations,
+                                Map<String, Map<String, String>> configTags,
+                                RoleCommand roleCommand,
+                                Map<String, String> commandParams,
+                                ServiceComponentHostEvent event)
+    throws AmbariException {
+
+    stage.addHostRoleExecutionCommand(scHost.getHostName(), Role.valueOf(scHost
+      .getServiceComponentName()), roleCommand,
+      event, scHost.getClusterName(),
+      scHost.getServiceName());
+    String serviceName = scHost.getServiceName();
+    String componentName = event.getServiceComponentName();
+    String hostname = scHost.getHostName();
+    String osType = clusters.getHost(hostname).getOsType();
+    StackId stackId = cluster.getDesiredStackVersion();
+    ServiceInfo serviceInfo = ambariMetaInfo.getServiceInfo(stackId.getStackName(),
+      stackId.getStackVersion(), serviceName);
+    ComponentInfo componentInfo = ambariMetaInfo.getComponent(
+      stackId.getStackName(), stackId.getStackVersion(),
+      serviceName, componentName);
+    StackInfo stackInfo = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+      stackId.getStackVersion());
+
+    ExecutionCommand execCmd = stage.getExecutionCommandWrapper(scHost.getHostName(),
+      scHost.getServiceComponentName()).getExecutionCommand();
+
+    Host host = clusters.getHost(scHost.getHostName());
+
+    // Hack - Remove passwords from configs
+    if (event.getServiceComponentName().equals(Role.HIVE_CLIENT.toString())) {
+      configHelper.applyCustomConfig(configurations, Configuration.HIVE_CONFIG_TAG,
+        Configuration.HIVE_METASTORE_PASSWORD_PROPERTY, "", true);
+    }
+
+    String jobtrackerHost = getJobTrackerHost(cluster);
+    if (!scHost.getHostName().equals(jobtrackerHost)) {
+      if (configTags.get(Configuration.GLOBAL_CONFIG_TAG) != null) {
+        configHelper.applyCustomConfig(
+          configurations, Configuration.GLOBAL_CONFIG_TAG,
+          Configuration.RCA_ENABLED_PROPERTY, "false", false);
+      }
+    }
+
+    execCmd.setConfigurations(configurations);
+    execCmd.setConfigurationTags(configTags);
+    if (commandParams == null) { // if not defined
+      commandParams = new TreeMap<String, String>();
+    }
+    commandParams.put(SCHEMA_VERSION, serviceInfo.getSchemaVersion());
+
+
+    // Get command script info for custom command/custom action
+    /*
+     * TODO: Custom actions are not supported yet, that's why we just pass
+     * component main commandScript to agent. This script is only used for
+     * default commads like INSTALL/STOP/START/CONFIGURE
+     */
+    String commandTimeout = configs.getDefaultAgentTaskTimeout();
+    CommandScriptDefinition script = componentInfo.getCommandScript();
+    if (serviceInfo.getSchemaVersion().equals(AmbariMetaInfo.SCHEMA_VERSION_2)) {
+      if (script != null) {
+        commandParams.put(SCRIPT, script.getScript());
+        commandParams.put(SCRIPT_TYPE, script.getScriptType().toString());
+        if (script.getTimeout() > 0) {
+          commandTimeout = String.valueOf(script.getTimeout());
+        }
+      } else {
+        String message = String.format("Component %s of service %s has no " +
+          "command script defined", componentName, serviceName);
+        throw new AmbariException(message);
+      }
+    }
+    commandParams.put(COMMAND_TIMEOUT, commandTimeout);
+    commandParams.put(SERVICE_PACKAGE_FOLDER,
+      serviceInfo.getServicePackageFolder());
+    commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
+
+    execCmd.setCommandParams(commandParams);
+
+    String repoInfo = customCommandExecutionHelper.getRepoInfo(cluster, host);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Sending repo information to agent"
+        + ", hostname=" + scHost.getHostName()
+        + ", clusterName=" + cluster.getClusterName()
+        + ", stackInfo=" + stackId.getStackId()
+        + ", repoInfo=" + repoInfo);
+    }
+
+    Map<String, String> hostParams = new TreeMap<String, String>();
+    hostParams.put(REPO_INFO, repoInfo);
+    hostParams.put(JDK_LOCATION, getJdkResourceUrl());
+    hostParams.put(JAVA_HOME, getJavaHome());
+    hostParams.put(JDK_NAME, getJDKName());
+    hostParams.put(JCE_NAME, getJCEName());
+    hostParams.put(STACK_NAME, stackId.getStackName());
+    hostParams.put(STACK_VERSION, stackId.getStackVersion());
+    hostParams.put(DB_NAME, getServerDB());
+    hostParams.put(MYSQL_JDBC_URL, getMysqljdbcUrl());
+    hostParams.put(ORACLE_JDBC_URL, getOjdbcUrl());
+    hostParams.putAll(getRcaParameters());
+
+    // Write down os specific info for the service
+    ServiceOsSpecific anyOs = null;
+    if (serviceInfo.getOsSpecifics().containsKey(AmbariMetaInfo.ANY_OS)) {
+      anyOs = serviceInfo.getOsSpecifics().get(AmbariMetaInfo.ANY_OS);
+    }
+    ServiceOsSpecific hostOs = null;
+    if (serviceInfo.getOsSpecifics().containsKey(osType)) {
+      hostOs = serviceInfo.getOsSpecifics().get(osType);
+      // Choose repo that is relevant for host
+      ServiceOsSpecific.Repo serviceRepo = hostOs.getRepo();
+      if (serviceRepo != null) {
+        String serviceRepoInfo = gson.toJson(serviceRepo);
+        hostParams.put(SERVICE_REPO_INFO, serviceRepoInfo);
+      }
+    }
+    // Build package list that is relevant for host
+    List<ServiceOsSpecific.Package> packages =
+      new ArrayList<ServiceOsSpecific.Package>();
+    if (anyOs != null) {
+      packages.addAll(anyOs.getPackages());
+    }
+
+    if (hostOs != null) {
+      packages.addAll(hostOs.getPackages());
+    }
+    String packageList = gson.toJson(packages);
+    hostParams.put(PACKAGE_LIST, packageList);
+
+    if (configs.getServerDBName().equalsIgnoreCase(Configuration
+      .ORACLE_DB_NAME)) {
+      hostParams.put(DB_DRIVER_FILENAME, configs.getOjdbcJarName());
+    } else if (configs.getServerDBName().equalsIgnoreCase(Configuration
+      .MYSQL_DB_NAME)) {
+      hostParams.put(DB_DRIVER_FILENAME, configs.getMySQLJarName());
+    }
+    execCmd.setHostLevelParams(hostParams);
+
+    Map<String, String> roleParams = new TreeMap<String, String>();
+    execCmd.setRoleParams(roleParams);
+
+    execCmd.setPassiveInfo(MaintenanceStateHelper.getMaintenanceHostCompoments(clusters, cluster));
+  }
+
+  private ActionExecutionContext getActionExecutionContext
+      (ExecuteActionRequest actionRequest) throws AmbariException {
+
+    if (actionRequest.isCommand()) {
+      return new ActionExecutionContext(actionRequest.getClusterName(),
+        actionRequest.getCommandName(), actionRequest.getResourceFilters(),
+        actionRequest.getParameters());
+    } else {
+
+    ActionDefinition actionDef = ambariMetaInfo.getActionDefinition(actionRequest.getActionName());
+
+    if (actionDef == null) {
+      throw new AmbariException("Action " + actionRequest.getActionName() + " does not exist");
+    }
+
+    return new ActionExecutionContext(actionRequest.getClusterName(),
+      actionRequest.getActionName(), actionRequest.getResourceFilters(),
+      actionRequest.getParameters(), actionDef.getTargetType(),
+      actionDef.getDefaultTimeout(), actionDef.getTargetService(),
+      actionDef.getTargetComponent());
+
+    }
+  }
+
   private List<Stage> doStageCreation(Cluster cluster,
       Map<State, List<Service>> changedServices,
       Map<State, List<ServiceComponent>> changedComps,
@@ -1294,9 +1490,8 @@ public class AmbariManagementControllerImpl implements
               }
             }
 
-            customCommandExecutionHelper.createHostAction(cluster, stage, scHost,
-                    configurations, configTags,
-                    roleCommand, requestParameters, event);
+            createHostAction(cluster, stage, scHost, configurations, configTags,
+              roleCommand, requestParameters, event);
           }
         }
       }
@@ -1319,8 +1514,8 @@ public class AmbariManagementControllerImpl implements
         }
 
         customCommandExecutionHelper.addServiceCheckAction(stage, clientHost,
-            smokeTestRole, nowTimestamp, serviceName,
-            null, null, createDefaultHostParams(cluster));
+          smokeTestRole, nowTimestamp, serviceName,
+          null, null, createDefaultHostParams(cluster));
       }
 
       RoleCommandOrder rco = getRoleCommandOrder(cluster);
@@ -2118,11 +2313,15 @@ public class AmbariManagementControllerImpl implements
 
     Cluster cluster = clusters.getCluster(clusterName);
 
-    ActionExecutionContext actionExecContext = null;
+    LOG.info("Received action execution request"
+      + ", clusterName=" + actionRequest.getClusterName()
+      + ", request=" + actionRequest.toString());
+
+    ActionExecutionContext actionExecContext = getActionExecutionContext(actionRequest);
     if (actionRequest.isCommand()) {
-      customCommandExecutionHelper.validateCustomCommand(actionRequest);
+      customCommandExecutionHelper.validateAction(actionRequest);
     } else {
-      actionExecContext = actionExecutionHelper.validateCustomAction(actionRequest, cluster);
+      actionExecutionHelper.validateAction(actionRequest);
     }
 
     Map<String, Set<String>> clusterHostInfo = StageUtils.getClusterHostInfo(
@@ -2134,9 +2333,9 @@ public class AmbariManagementControllerImpl implements
     Map<String, String> params = createDefaultHostParams(cluster);
 
     if (actionRequest.isCommand()) {
-      customCommandExecutionHelper.addAction(actionRequest, stage, params);
+      customCommandExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, params);
     } else {
-      actionExecutionHelper.addAction(actionExecContext, stage, configs, hostsMap, params);
+      actionExecutionHelper.addExecutionCommandsToStage(actionExecContext, stage, params);
     }
 
     RoleCommandOrder rco = this.getRoleCommandOrder(cluster);
@@ -2313,7 +2512,6 @@ public class AmbariManagementControllerImpl implements
       }
     }
   }
-  
 
   @Override
   public Set<StackVersionResponse> getStackVersions(

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index b9f62e4..2361af7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -131,6 +131,7 @@ public class AmbariServer {
     return clusterController;
   }
 
+  @SuppressWarnings("deprecation")
   public void run() throws Exception {
     // Initialize meta info before heartbeat monitor
     ambariMetaInfo.init();

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index ae57af2..adb78c3 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -37,7 +37,6 @@ import org.apache.ambari.server.controller.internal.HostComponentResourceProvide
 import org.apache.ambari.server.controller.internal.HostResourceProvider;
 import org.apache.ambari.server.controller.internal.ServiceResourceProvider;
 import org.apache.ambari.server.controller.spi.ResourceProvider;
-import org.apache.ambari.server.orm.DBAccessorImpl;
 import org.apache.ambari.server.orm.PersistenceType;
 import org.apache.ambari.server.scheduler.ExecutionScheduler;
 import org.apache.ambari.server.scheduler.ExecutionSchedulerImpl;
@@ -71,12 +70,13 @@ import org.apache.ambari.server.state.scheduler.RequestExecutionImpl;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostImpl;
 import org.springframework.security.crypto.password.PasswordEncoder;
 import org.springframework.security.crypto.password.StandardPasswordEncoder;
+
 import java.security.SecureRandom;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
+
 import com.google.gson.GsonBuilder;
-import org.apache.ambari.server.orm.DBAccessor;
 import static org.eclipse.persistence.config.PersistenceUnitProperties.CREATE_JDBC_DDL_FILE;
 import static org.eclipse.persistence.config.PersistenceUnitProperties.CREATE_ONLY;
 import static org.eclipse.persistence.config.PersistenceUnitProperties.CREATE_OR_EXTEND;
@@ -247,7 +247,6 @@ public class ControllerModule extends AbstractModule {
     install(new FactoryModuleBuilder().build(RequestFactory.class));
 
     bind(HostRoleCommandFactory.class).to(HostRoleCommandFactoryImpl.class);
-    bind(DBAccessor.class).to(DBAccessorImpl.class);
   }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
index f8dd908..3b5afcd 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ExecuteActionRequest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.ambari.server.controller;
 
-import org.apache.ambari.server.utils.StageUtils;
+import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -28,38 +28,36 @@ import java.util.Map;
  * Helper class to capture details used to create action or custom commands
  */
 public class ExecuteActionRequest {
-  private String clusterName;
-  private String commandName;
+  private final String clusterName;
+  private final String commandName;
+  private final List<RequestResourceFilter> resourceFilters;
   private String actionName;
-  private String serviceName;
-  private String componentName;
-  private List<String> hosts;
   private Map<String, String> parameters;
 
   public ExecuteActionRequest(String clusterName, String commandName,
-                              String actionName, String serviceName, String componentName,
-                              List<String> hosts, Map<String, String> parameters) {
-    this(clusterName, commandName, serviceName, parameters);
+                              String actionName,
+                              List<RequestResourceFilter> resourceFilters,
+                              Map<String, String> parameters) {
+    this(clusterName, commandName, parameters);
     this.actionName = actionName;
-    this.componentName = componentName;
-    if (hosts != null) {
-      this.hosts.addAll(hosts);
+    if (resourceFilters != null) {
+      this.resourceFilters.addAll(resourceFilters);
     }
   }
 
   /**
-   * Create an ExecuteActionRequest to execute a command
+   * Create an ExecuteActionRequest to execute a command.
+   * No filters.
    */
-  public ExecuteActionRequest(String clusterName, String commandName, String serviceName,
-                              Map<String, String> parameters) {
+  public ExecuteActionRequest(String clusterName, String commandName, Map<String, String> parameters) {
     this.clusterName = clusterName;
     this.commandName = commandName;
-    this.serviceName = serviceName;
+    this.actionName = null;
     this.parameters = new HashMap<String, String>();
     if (parameters != null) {
       this.parameters.putAll(parameters);
     }
-    this.hosts = new ArrayList<String>();
+    this.resourceFilters = new ArrayList<RequestResourceFilter>();
   }
 
   public String getClusterName() {
@@ -74,22 +72,14 @@ public class ExecuteActionRequest {
     return actionName;
   }
 
-  public String getServiceName() {
-    return serviceName;
-  }
-
-  public String getComponentName() {
-    return componentName;
+  public List<RequestResourceFilter> getResourceFilters() {
+    return resourceFilters;
   }
 
   public Map<String, String> getParameters() {
     return parameters;
   }
 
-  public List<String> getHosts() {
-    return hosts;
-  }
-
   public Boolean isCommand() {
     return actionName == null || actionName.isEmpty();
   }
@@ -101,9 +91,7 @@ public class ExecuteActionRequest {
         append(", action :" + actionName).
         append(", command :" + commandName).
         append(", inputs :" + parameters.toString()).
-        append(", targetService :" + serviceName).
-        append(", targetComponent :" + componentName).
-        append(", targetHosts :" + hosts.toString()).
+        append(", resourceFilters: " + resourceFilters).
         append(", clusterName :" + clusterName).toString();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java
index 5251b3d..b78f68f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/MaintenanceStateHelper.java
@@ -17,14 +17,17 @@
  */
 package org.apache.ambari.server.controller;
 
+import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
-
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.HostNotFoundException;
 import org.apache.ambari.server.RoleCommand;
+import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Host;
@@ -32,7 +35,6 @@ import org.apache.ambari.server.state.MaintenanceState;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
 import org.apache.ambari.server.state.ServiceComponentHost;
-
 import com.google.inject.Inject;
 import com.google.inject.Injector;
 
@@ -143,11 +145,14 @@ public class MaintenanceStateHelper {
     
     // return the first one, just like amc.createStages()
     RequestStatusResponse response = null;
-    
+
+    RequestResourceFilter resourceFilter =
+      new RequestResourceFilter(NAGIOS_SERVICE, NAGIOS_COMPONENT, null);
+
     for (String clusterName : clusterNames) {
       ExecuteActionRequest actionRequest = new ExecuteActionRequest(
-          clusterName, RoleCommand.ACTIONEXECUTE.name(),
-          NAGIOS_ACTION_NAME, NAGIOS_SERVICE, NAGIOS_COMPONENT, null, params);
+        clusterName, null, NAGIOS_ACTION_NAME,
+        Collections.singletonList(resourceFilter), params);
       
       if (null == response)
         response = amc.createAction(actionRequest, requestProperties);

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceFilter.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceFilter.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceFilter.java
new file mode 100644
index 0000000..8a492a2
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceFilter.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller.internal;
+
+import org.codehaus.jackson.annotate.JsonProperty;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class RequestResourceFilter {
+  private String serviceName;
+  private String componentName;
+  private final List<String> hostNames = new ArrayList<String>();
+
+  public RequestResourceFilter() {
+
+  }
+
+  public RequestResourceFilter(String serviceName, String componentName, List<String> hostNames) {
+    this.serviceName = serviceName;
+    this.componentName = componentName;
+    if (hostNames != null) {
+      this.hostNames.addAll(hostNames);
+    }
+  }
+
+  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
+  @JsonProperty("service_name")
+  public String getServiceName() {
+    return serviceName;
+  }
+
+  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
+  @JsonProperty("component_name")
+  public String getComponentName() {
+    return componentName;
+  }
+
+  @JsonSerialize(include = JsonSerialize.Inclusion.NON_EMPTY)
+  @JsonProperty("hosts")
+  public List<String> getHostNames() {
+    return hostNames;
+  }
+
+  @Override
+  public String toString() {
+    return "RequestResourceFilter{" +
+      "serviceName='" + serviceName + '\'' +
+      ", componentName='" + componentName + '\'' +
+      ", hostNames=" + hostNames +
+      '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
index 92d1476..5b3ff8a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RequestResourceProvider.java
@@ -17,12 +17,14 @@
  */
 package org.apache.ambari.server.controller.internal;
 
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.actionmanager.HostRoleCommand;
 import org.apache.ambari.server.actionmanager.HostRoleStatus;
-import org.apache.ambari.server.controller.ExecuteActionRequest;
 import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.ExecuteActionRequest;
 import org.apache.ambari.server.controller.RequestStatusResponse;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
@@ -33,9 +35,9 @@ import org.apache.ambari.server.controller.spi.Resource;
 import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
-import org.apache.ambari.server.controller.utilities.PropertyHelper;
 import org.apache.ambari.server.state.Clusters;
 
+import java.lang.reflect.Type;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -62,9 +64,7 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
   public static final String REQUEST_SOURCE_SCHEDULE_HREF = "Requests/request_schedule/href";
   protected static final String REQUEST_TYPE_ID = "Requests/type";
   protected static final String REQUEST_INPUTS_ID = "Requests/inputs";
-  protected static final String REQUEST_TARGET_SERVICE_ID = "Requests/target_service";
-  protected static final String REQUEST_TARGET_COMPONENT_ID = "Requests/target_component";
-  protected static final String REQUEST_TARGET_HOSTS_ID = "Requests/target_hosts";
+  protected static final String REQUEST_RESOURCE_FILTER_ID = "Requests/resources";
   protected static final String REQUEST_CREATE_TIME_ID = "Requests/create_time";
   protected static final String REQUEST_START_TIME_ID = "Requests/start_time";
   protected static final String REQUEST_END_TIME_ID = "Requests/end_time";
@@ -77,13 +77,11 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
   protected static final String REQUEST_PROGRESS_PERCENT_ID = "Requests/progress_percent";
   protected static final String COMMAND_ID = "command";
   protected static final String ACTION_ID = "action";
-  protected static final String HOSTS_ID = "hosts";
-  protected static final String SERVICE_NAME_ID = "service_name";
-  protected static final String COMPONENT_NAME_ID = "component_name";
   protected static final String INPUTS_ID = "parameters";
   private static Set<String> pkPropertyIds =
       new HashSet<String>(Arrays.asList(new String[]{
           REQUEST_ID_PROPERTY_ID}));
+  private Gson gson = new Gson();
 
   // ----- Constructors ----------------------------------------------------
 
@@ -165,6 +163,7 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
   // ----- utility methods --------------------------------------------------
 
   // Get request to execute an action/command
+  @SuppressWarnings("unchecked")
   private ExecuteActionRequest getActionRequest(Request request) {
     Map<String, String> requestInfoProperties = request.getRequestInfoProperties();
     Map<String, Object> propertyMap = request.getProperties().iterator().next();
@@ -184,20 +183,13 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
       actionName = requestInfoProperties.get(ACTION_ID);
     }
 
-    String hostList = requestInfoProperties.get(HOSTS_ID);
-    List<String> hosts = new ArrayList<String>();
-    if (hostList != null && !hostList.isEmpty()) {
-      for (String hostname : hostList.split(",")) {
-        String trimmedName = hostname.trim();
-        if (!trimmedName.isEmpty()) {
-          hosts.add(hostname.trim());
-        }
-      }
+    List<RequestResourceFilter> resourceFilters = null;
+    Object resourceFilterObj = propertyMap.get(REQUEST_RESOURCE_FILTER_ID);
+    if (resourceFilterObj != null) {
+      Type filterListType = new TypeToken<List<RequestResourceFilter>>(){}.getType();
+      resourceFilters = gson.fromJson((String) resourceFilterObj, filterListType);
     }
 
-    String serviceName = requestInfoProperties.get(SERVICE_NAME_ID);
-    String componentName = requestInfoProperties.get(COMPONENT_NAME_ID);
-
     Map<String, String> params = new HashMap<String, String>();
     String keyPrefix = "/" + INPUTS_ID + "/";
     for (String key : requestInfoProperties.keySet()) {
@@ -207,13 +199,11 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
     }
 
     return new ExecuteActionRequest(
-        (String) propertyMap.get(REQUEST_CLUSTER_NAME_PROPERTY_ID),
-        commandName,
-        actionName,
-        serviceName,
-        componentName,
-        hosts,
-        params);
+      (String) propertyMap.get(REQUEST_CLUSTER_NAME_PROPERTY_ID),
+      commandName,
+      actionName,
+      resourceFilters,
+      params);
   }
 
   // Get all of the request resources for the given properties
@@ -286,9 +276,7 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
     setResourceProperty(resource, REQUEST_CONTEXT_ID, request.getRequestContext(), requestedPropertyIds);
     setResourceProperty(resource, REQUEST_TYPE_ID, request.getRequestType(), requestedPropertyIds);
     setResourceProperty(resource, REQUEST_INPUTS_ID, request.getInputs(), requestedPropertyIds);
-    setResourceProperty(resource, REQUEST_TARGET_SERVICE_ID, request.getTargetService(), requestedPropertyIds);
-    setResourceProperty(resource, REQUEST_TARGET_COMPONENT_ID, request.getTargetComponent(), requestedPropertyIds);
-    setResourceProperty(resource, REQUEST_TARGET_HOSTS_ID, request.getTargetHosts(), requestedPropertyIds);
+    setResourceProperty(resource, REQUEST_RESOURCE_FILTER_ID, request.getResourceFilters(), requestedPropertyIds);
     setResourceProperty(resource, REQUEST_CREATE_TIME_ID, request.getCreateTime(), requestedPropertyIds);
     setResourceProperty(resource, REQUEST_START_TIME_ID, request.getStartTime(), requestedPropertyIds);
     setResourceProperty(resource, REQUEST_END_TIME_ID, request.getEndTime(), requestedPropertyIds);
@@ -421,4 +409,5 @@ public class RequestResourceProvider extends AbstractControllerResourceProvider
 
     return resource;
   }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
index 072b4ed..17fbe2f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestEntity.java
@@ -29,7 +29,6 @@ import javax.persistence.EnumType;
 import javax.persistence.Enumerated;
 import javax.persistence.Id;
 import javax.persistence.JoinColumn;
-import javax.persistence.Lob;
 import javax.persistence.ManyToOne;
 import javax.persistence.OneToMany;
 import javax.persistence.Table;
@@ -63,18 +62,6 @@ public class RequestEntity {
   @Basic
   private String inputs;
 
-  @Column(name = "target_service")
-  @Basic
-  private String targetService;
-
-  @Column(name = "target_component")
-  @Basic
-  private String targetComponent;
-
-  @Column(name = "target_hosts")
-  @Lob
-  private String targetHosts;
-
   @Column(name = "request_type")
   @Enumerated(value = EnumType.STRING)
   private RequestType requestType;
@@ -98,6 +85,9 @@ public class RequestEntity {
   @OneToMany(mappedBy = "request")
   private Collection<StageEntity> stages;
 
+  @OneToMany(mappedBy = "requestEntity")
+  private Collection<RequestResourceFilterEntity> resourceFilterEntities;
+
   @ManyToOne(cascade = {CascadeType.MERGE})
   @JoinColumn(name = "cluster_id", referencedColumnName = "cluster_id")
   private ClusterEntity cluster;
@@ -170,30 +160,6 @@ public class RequestEntity {
     this.inputs = inputs;
   }
 
-  public String getTargetService() {
-    return targetService;
-  }
-
-  public void setTargetService(String targetService) {
-    this.targetService = targetService;
-  }
-
-  public String getTargetComponent() {
-    return targetComponent;
-  }
-
-  public void setTargetComponent(String targetComponent) {
-    this.targetComponent = targetComponent;
-  }
-
-  public String getTargetHosts() {
-    return targetHosts;
-  }
-
-  public void setTargetHosts(String targetHosts) {
-    this.targetHosts = targetHosts;
-  }
-
   public RequestType getRequestType() {
     return requestType;
   }
@@ -206,8 +172,16 @@ public class RequestEntity {
     return clusterId;
   }
 
+  public Collection<RequestResourceFilterEntity> getResourceFilterEntities() {
+    return resourceFilterEntities;
+  }
+
+  public void setResourceFilterEntities(Collection<RequestResourceFilterEntity> resourceFilterEntities) {
+    this.resourceFilterEntities = resourceFilterEntities;
+  }
+
   public void setClusterId(Long clusterId) {
     this.clusterId = clusterId;
   }
 
   public String getCommandName() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java
new file mode 100644
index 0000000..e03b3b6
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RequestResourceFilterEntity.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.orm.entities;
+
+import javax.persistence.Basic;
+import javax.persistence.Column;
+import javax.persistence.Entity;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.Lob;
+import javax.persistence.ManyToOne;
+import javax.persistence.Table;
+
+/**
+ * A single resource filter (service / component / hosts) that scopes a
+ * request, replacing the former target_service, target_component and
+ * target_hosts columns of the request table; one request may own several
+ * filters.
+ *
+ * NOTE(review): the composite key is declared with multiple @Id fields and
+ * no @IdClass; strict JPA requires an id class for this mapping — confirm
+ * the persistence provider accepts it.
+ */
+@Entity
+@Table(name = "requestresourcefilter")
+public class RequestResourceFilterEntity {
+
+  @Column(name = "request_id", nullable = false, insertable = true, updatable = true)
+  @Id
+  private Long requestId;
+
+  // "service_name" matches the column created by the DDL scripts and UpgradeCatalog150.
+  @Column(name = "service_name")
+  @Basic
+  @Id
+  private String serviceName;
+
+  // "component_name" matches the column created by the DDL scripts and UpgradeCatalog150.
+  @Column(name = "component_name")
+  @Basic
+  @Id
+  private String componentName;
+
+  // Host list for the filter, persisted as a LOB (LONGTEXT/CLOB/TEXT depending on DB).
+  @Column(name = "hosts")
+  @Lob
+  private String hosts;
+
+  // Read-only association to the owning request; the FK value itself is written via requestId.
+  @ManyToOne
+  @JoinColumn(name = "request_id", referencedColumnName = "request_id", nullable = false, insertable = false, updatable = false)
+  private RequestEntity requestEntity;
+
+  public String getServiceName() {
+    return serviceName;
+  }
+
+  public void setServiceName(String serviceName) {
+    this.serviceName = serviceName;
+  }
+
+  public String getComponentName() {
+    return componentName;
+  }
+
+  public void setComponentName(String componentName) {
+    this.componentName = componentName;
+  }
+
+  public String getHosts() {
+    return hosts;
+  }
+
+  public void setHosts(String hosts) {
+    this.hosts = hosts;
+  }
+
+  public Long getRequestId() {
+    return requestId;
+  }
+
+  public void setRequestId(Long requestId) {
+    this.requestId = requestId;
+  }
+
+  public RequestEntity getRequestEntity() {
+    return requestEntity;
+  }
+
+  public void setRequestEntity(RequestEntity request) {
+    this.requestEntity = request;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
index f104259..33e49ac 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog150.java
@@ -200,6 +200,15 @@ public class UpgradeCatalog150 extends AbstractUpgradeCatalog {
 
     dbAccessor.createTable("hostgroup_component", columns, "blueprint_name", "hostgroup_name", "name");
 
+    // RequestResourceFilter
+    columns.clear();
+    columns.add(new DBColumnInfo("request_id", Long.class, null, null, false));
+    columns.add(new DBColumnInfo("service_name", String.class, 255, null, true));
+    columns.add(new DBColumnInfo("component_name", String.class, 255, null, true));
+    columns.add(new DBColumnInfo("hosts", byte[].class, null, null, true));
+
+    dbAccessor.createTable("requestresourcefilter", columns, "request_id", "service_name", "component_name");
+
     createQuartzTables();
 
     // ========================================================================
@@ -249,6 +258,7 @@ public class UpgradeCatalog150 extends AbstractUpgradeCatalog {
     dbAccessor.addFKConstraint("confgrouphostmapping", "FK_cghostm_configgroup_id", "config_group_id", "configgroup", "group_id", true);
     dbAccessor.addFKConstraint("confgrouphostmapping", "FK_cghostm_host_name", "host_name", "hosts", "host_name", true);
     dbAccessor.addFKConstraint("clusterconfigmapping", "FK_clustercfgmap_cluster_id", "cluster_id", "clusters", "cluster_id", true);
+    dbAccessor.addFKConstraint("requestresourcefilter", "FK_reqresfilter_req_id", "request_id", "request", "request_id", true);
 
     // ========================================================================
     // Finally update schema version

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
index 2e97b08..2224a85 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
@@ -42,7 +42,8 @@ CREATE TABLE execution_command (task_id BIGINT NOT NULL, command LONGBLOB, PRIMA
 CREATE TABLE host_role_command (task_id BIGINT NOT NULL, attempt_count SMALLINT NOT NULL, event LONGTEXT NOT NULL, exitcode INTEGER NOT NULL, host_name VARCHAR(255) NOT NULL, last_attempt_time BIGINT NOT NULL, request_id BIGINT NOT NULL, role VARCHAR(255), role_command VARCHAR(255), stage_id BIGINT NOT NULL, start_time BIGINT NOT NULL, end_time BIGINT, status VARCHAR(255), std_error LONGBLOB, std_out LONGBLOB, structured_out LONGBLOB, command_detail VARCHAR(255), custom_command_name VARCHAR(255), PRIMARY KEY (task_id));
 CREATE TABLE role_success_criteria (role VARCHAR(255) NOT NULL, request_id BIGINT NOT NULL, stage_id BIGINT NOT NULL, success_factor DOUBLE NOT NULL, PRIMARY KEY (role, request_id, stage_id));
 CREATE TABLE stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info LONGBLOB, PRIMARY KEY (stage_id, request_id));
-CREATE TABLE request (request_id BIGINT NOT NULL, cluster_id BIGINT, request_schedule_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs LONGTEXT, request_context VARCHAR(255), request_type VARCHAR(255), start_time BIGINT NOT NULL, status VARCHAR(255), target_component VARCHAR(255), target_hosts LONGTEXT, target_service VARCHAR(255), PRIMARY KEY (request_id));
+CREATE TABLE request (request_id BIGINT NOT NULL, cluster_id BIGINT, request_schedule_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs LONGTEXT, request_context VARCHAR(255), request_type VARCHAR(255), start_time BIGINT NOT NULL, status VARCHAR(255), PRIMARY KEY (request_id));
+CREATE TABLE requestresourcefilter (request_id BIGINT NOT NULL, service_name VARCHAR(255), component_name VARCHAR(255), hosts LONGTEXT, PRIMARY KEY (request_id, service_name, component_name));
 CREATE TABLE key_value_store (`key` VARCHAR(255), `value` LONGTEXT, PRIMARY KEY (`key`));
 CREATE TABLE clusterconfigmapping (type_name VARCHAR(255) NOT NULL, create_timestamp BIGINT NOT NULL, cluster_id BIGINT NOT NULL, selected INTEGER NOT NULL DEFAULT 0, version_tag VARCHAR(255) NOT NULL, user_name VARCHAR(255) NOT NULL DEFAULT '_db', PRIMARY KEY (type_name, create_timestamp, cluster_id));
 CREATE TABLE hostconfigmapping (create_timestamp BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, cluster_id BIGINT NOT NULL, type_name VARCHAR(255) NOT NULL, selected INTEGER NOT NULL DEFAULT 0, service_name VARCHAR(255), version_tag VARCHAR(255) NOT NULL, user_name VARCHAR(255) NOT NULL DEFAULT '_db', PRIMARY KEY (create_timestamp, host_name, cluster_id, type_name));
@@ -93,6 +94,7 @@ ALTER TABLE configgrouphostmapping ADD CONSTRAINT FK_configgrouphostmapping_host
 ALTER TABLE requestschedulebatchrequest ADD CONSTRAINT FK_requestschedulebatchrequest_schedule_id FOREIGN KEY (schedule_id) REFERENCES ambari.requestschedule (schedule_id);
 ALTER TABLE hostgroup ADD FOREIGN KEY (blueprint_name) REFERENCES blueprint(blueprint_name);
 ALTER TABLE hostgroup_component ADD FOREIGN KEY (blueprint_name, hostgroup_name) REFERENCES hostgroup(blueprint_name, name);
+ALTER TABLE requestresourcefilter ADD CONSTRAINT FK_requestresourcefilter_req_id FOREIGN KEY (request_id) REFERENCES request (request_id);
 
 
 INSERT INTO ambari_sequences(sequence_name, value) values ('cluster_id_seq', 1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index e0e6927..637a149 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -32,7 +32,8 @@ CREATE TABLE execution_command (task_id NUMBER(19) NOT NULL, command BLOB NULL,
 CREATE TABLE host_role_command (task_id NUMBER(19) NOT NULL, attempt_count NUMBER(5) NOT NULL, event CLOB NULL, exitcode NUMBER(10) NOT NULL, host_name VARCHAR2(255) NOT NULL, last_attempt_time NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, role VARCHAR2(255) NULL, role_command VARCHAR2(255) NULL, stage_id NUMBER(19) NOT NULL, start_time NUMBER(19) NOT NULL, end_time NUMBER(19), status VARCHAR2(255) NULL, std_error BLOB NULL, std_out BLOB NULL, structured_out BLOB NULL,  command_detail VARCHAR2(255) NULL, custom_command_name VARCHAR2(255) NULL, PRIMARY KEY (task_id));
 CREATE TABLE role_success_criteria (role VARCHAR2(255) NOT NULL, request_id NUMBER(19) NOT NULL, stage_id NUMBER(19) NOT NULL, success_factor NUMBER(19,4) NOT NULL, PRIMARY KEY (role, request_id, stage_id));
 CREATE TABLE stage (stage_id NUMBER(19) NOT NULL, request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19) NULL, log_info VARCHAR2(255) NULL, request_context VARCHAR2(255) NULL, cluster_host_info BLOB NOT NULL, PRIMARY KEY (stage_id, request_id));
-CREATE TABLE request (request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19), request_schedule_id NUMBER(19), command_name VARCHAR(255), create_time NUMBER(19) NOT NULL, end_time NUMBER(19) NOT NULL, inputs CLOB, request_context VARCHAR(255), request_type VARCHAR(255), start_time NUMBER(19) NOT NULL, status VARCHAR(255), target_component VARCHAR(255), target_hosts CLOB, target_service VARCHAR(255), PRIMARY KEY (request_id));
+CREATE TABLE request (request_id NUMBER(19) NOT NULL, cluster_id NUMBER(19), request_schedule_id NUMBER(19), command_name VARCHAR(255), create_time NUMBER(19) NOT NULL, end_time NUMBER(19) NOT NULL, inputs CLOB, request_context VARCHAR(255), request_type VARCHAR(255), start_time NUMBER(19) NOT NULL, status VARCHAR(255), PRIMARY KEY (request_id));
+CREATE TABLE requestresourcefilter (request_id NUMBER(19) NOT NULL, service_name VARCHAR2(255), component_name VARCHAR2(255), hosts CLOB, PRIMARY KEY (request_id, service_name, component_name));
 CREATE TABLE key_value_store ("key" VARCHAR2(255) NOT NULL, "value" CLOB NULL, PRIMARY KEY ("key"));
 CREATE TABLE clusterconfigmapping (type_name VARCHAR2(255) NOT NULL, create_timestamp NUMBER(19) NOT NULL, cluster_id NUMBER(19) NOT NULL, selected NUMBER(10) NOT NULL, version_tag VARCHAR2(255) NOT NULL, user_name VARCHAR(255) DEFAULT '_db', PRIMARY KEY (type_name, create_timestamp, cluster_id));
 CREATE TABLE hostconfigmapping (create_timestamp NUMBER(19) NOT NULL, host_name VARCHAR2(255) NOT NULL, cluster_id NUMBER(19) NOT NULL, type_name VARCHAR2(255) NOT NULL, selected NUMBER(10) NOT NULL, service_name VARCHAR2(255) NULL, version_tag VARCHAR2(255) NOT NULL, user_name VARCHAR(255) DEFAULT '_db', PRIMARY KEY (create_timestamp, host_name, cluster_id, type_name));
@@ -81,6 +82,7 @@ ALTER TABLE configgrouphostmapping ADD CONSTRAINT FK_cghm_hname FOREIGN KEY (hos
 ALTER TABLE requestschedulebatchrequest ADD CONSTRAINT FK_rsbatchrequest_schedule_id FOREIGN KEY (schedule_id) REFERENCES requestschedule (schedule_id);
 ALTER TABLE hostgroup ADD FOREIGN KEY (blueprint_name) REFERENCES ambari.blueprint(blueprint_name);
 ALTER TABLE hostgroup_component ADD FOREIGN KEY (blueprint_name, hostgroup_name) REFERENCES ambari.hostgroup(blueprint_name, name);
+ALTER TABLE requestresourcefilter ADD CONSTRAINT FK_reqresfilter_req_id FOREIGN KEY (request_id) REFERENCES request (request_id);
 
 INSERT INTO ambari_sequences(sequence_name, value) values ('host_role_command_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, value) values ('user_id_seq', 1);

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 60462e2..3db04b4 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -79,7 +79,11 @@ GRANT ALL PRIVILEGES ON TABLE ambari.role_success_criteria TO :username;
 CREATE TABLE ambari.stage (stage_id BIGINT NOT NULL, request_id BIGINT NOT NULL, cluster_id BIGINT NOT NULL, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), cluster_host_info BYTEA NOT NULL, PRIMARY KEY (stage_id, request_id));
 GRANT ALL PRIVILEGES ON TABLE ambari.stage TO :username;
 
-CREATE TABLE ambari.request (request_id BIGINT NOT NULL, cluster_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs VARCHAR(32000), request_context VARCHAR(255), request_type VARCHAR(255), request_schedule_id BIGINT, start_time BIGINT NOT NULL, status VARCHAR(255), target_component VARCHAR(255), target_hosts TEXT, target_service VARCHAR(255), PRIMARY KEY (request_id));
+CREATE TABLE ambari.request (request_id BIGINT NOT NULL, cluster_id BIGINT, command_name VARCHAR(255), create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, inputs VARCHAR(32000), request_context VARCHAR(255), request_type VARCHAR(255), request_schedule_id BIGINT, start_time BIGINT NOT NULL, status VARCHAR(255), PRIMARY KEY (request_id));
+GRANT ALL PRIVILEGES ON TABLE ambari.request TO :username;
+
+CREATE TABLE ambari.requestresourcefilter (request_id BIGINT NOT NULL, service_name VARCHAR(255), component_name VARCHAR(255), hosts TEXT, PRIMARY KEY (request_id, service_name, component_name));
+GRANT ALL PRIVILEGES ON TABLE ambari.requestresourcefilter TO :username;
 GRANT ALL PRIVILEGES ON TABLE ambari.request TO :username;
 
 CREATE TABLE ambari.ClusterHostMapping (cluster_id BIGINT NOT NULL, host_name VARCHAR(255) NOT NULL, PRIMARY KEY (cluster_id, host_name));
@@ -98,7 +101,6 @@ CREATE TABLE ambari.metainfo ("metainfo_key" VARCHAR(255), "metainfo_value" VARC
 GRANT ALL PRIVILEGES ON TABLE ambari.metainfo TO :username;
 
 CREATE TABLE ambari.ambari_sequences (sequence_name VARCHAR(255) PRIMARY KEY, "value" BIGINT NOT NULL);
-
 GRANT ALL PRIVILEGES ON TABLE ambari.ambari_sequences TO :username;
 
 CREATE TABLE ambari.configgroup (group_id BIGINT, cluster_id BIGINT NOT NULL, group_name VARCHAR(255) NOT NULL, tag VARCHAR(1024) NOT NULL, description VARCHAR(1024), create_timestamp BIGINT NOT NULL, PRIMARY KEY(group_id));
@@ -157,6 +159,7 @@ ALTER TABLE ambari.configgrouphostmapping ADD CONSTRAINT FK_configgrouphostmappi
 ALTER TABLE ambari.requestschedulebatchrequest ADD CONSTRAINT FK_requestschedulebatchrequest_schedule_id FOREIGN KEY (schedule_id) REFERENCES ambari.requestschedule (schedule_id);
 ALTER TABLE ambari.hostgroup ADD FOREIGN KEY (blueprint_name) REFERENCES ambari.blueprint(blueprint_name);
 ALTER TABLE ambari.hostgroup_component ADD FOREIGN KEY (blueprint_name, hostgroup_name) REFERENCES ambari.hostgroup(blueprint_name, name);
+ALTER TABLE ambari.requestresourcefilter ADD CONSTRAINT FK_requestresourcefilter_req_id FOREIGN KEY (request_id) REFERENCES ambari.request (request_id);
 
 
 ---------inserting some data-----------

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/resources/META-INF/persistence.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/META-INF/persistence.xml b/ambari-server/src/main/resources/META-INF/persistence.xml
index c85d79d..b6c1bd9 100644
--- a/ambari-server/src/main/resources/META-INF/persistence.xml
+++ b/ambari-server/src/main/resources/META-INF/persistence.xml
@@ -38,12 +38,12 @@
     <class>org.apache.ambari.server.orm.entities.ConfigGroupEntity</class>
     <class>org.apache.ambari.server.orm.entities.ConfigGroupConfigMappingEntity</class>
     <class>org.apache.ambari.server.orm.entities.ConfigGroupHostMappingEntity</class>
-    <class>org.apache.ambari.server.orm.entities.ActionEntity</class>
     <class>org.apache.ambari.server.orm.entities.RequestScheduleEntity</class>
     <class>org.apache.ambari.server.orm.entities.RequestScheduleBatchRequestEntity</class>
     <class>org.apache.ambari.server.orm.entities.BlueprintEntity</class>
     <class>org.apache.ambari.server.orm.entities.HostGroupEntity</class>
     <class>org.apache.ambari.server.orm.entities.HostGroupComponentEntity</class>
+    <class>org.apache.ambari.server.orm.entities.RequestResourceFilterEntity</class>
 
     <properties>
       <!--<property name="javax.persistence.jdbc.url" value="jdbc:postgresql://localhost/ambari" />-->

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json
index 270353b..d3987b5 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -103,9 +103,7 @@
         "Requests/request_schedule",
         "Requests/type",
         "Requests/inputs",
-        "Requests/target_service",
-        "Requests/target_component",
-        "Requests/target_hosts",
+        "Requests/resources",
         "Requests/create_time",
         "Requests/start_time",
         "Requests/end_time",

http://git-wip-us.apache.org/repos/asf/ambari/blob/c6987eee/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
index d052673..344891d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/actionmanager/TestActionDBAccessorImpl.java
@@ -30,6 +30,7 @@ import org.apache.ambari.server.agent.ActionQueue;
 import org.apache.ambari.server.agent.CommandReport;
 import org.apache.ambari.server.controller.ExecuteActionRequest;
 import org.apache.ambari.server.controller.HostsMap;
+import org.apache.ambari.server.controller.internal.RequestResourceFilter;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.ExecutionCommandDAO;
@@ -366,8 +367,11 @@ public class TestActionDBAccessorImpl {
             hostname, System.currentTimeMillis()), "cluster1", "HBASE");
     List<Stage> stages = new ArrayList<Stage>();
     stages.add(s);
-    ExecuteActionRequest executeActionRequest = new ExecuteActionRequest("cluster1", null, actionName, "HBASE",
-        "HBASE_MASTER", null, null);
+    final RequestResourceFilter resourceFilter = new RequestResourceFilter("HBASE", "HBASE_MASTER", null);
+    List<RequestResourceFilter> resourceFilters = new
+      ArrayList<RequestResourceFilter>() {{ add(resourceFilter); }};
+    ExecuteActionRequest executeActionRequest = new ExecuteActionRequest
+      ("cluster1", null, actionName, resourceFilters, null);
     Request request = new Request(stages, clusters);
     db.persistActions(request);
   }